From 85502b4e2c8b7459d9e8e44a0de384ba28c3f34e Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 12:46:09 +0000 Subject: [PATCH 01/32] feat: add end-user `infrahub` CLI for CRUD operations and schema discovery MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add a new `infrahub` command as a separate entry point alongside `infrahubctl`. While `infrahubctl` targets developers building on top of Infrahub, the new `infrahub` command targets end users who need to query, create, update, and delete data in the Infrahub database. Commands: - `infrahub get [identifier]` — list or detail view with table/JSON/CSV/YAML output - `infrahub create ` — create objects via --set flags or --file (JSON/YAML) - `infrahub update ` — update objects via --set flags or --file - `infrahub delete ` — delete with confirmation prompt (--yes to skip) - `infrahub schema list/show` — discover available kinds and their attributes Key features: - Four output formats: table (default TTY), JSON (default piped), CSV, YAML - YAML output uses Infrahub Object format (apiVersion/kind/spec) for round-tripping - Unified --set flag for both attributes and relationships - Reuses existing SDK client, config, and AsyncTyper infrastructure - 87 unit tests, 16 integration tests --- .../sdk_ref/infrahub_sdk/client.mdx | 216 +++++++------- .../sdk_ref/infrahub_sdk/node/attribute.mdx | 8 +- .../sdk_ref/infrahub_sdk/node/metadata.mdx | 4 +- .../sdk_ref/infrahub_sdk/node/node.mdx | 110 +++---- .../sdk_ref/infrahub_sdk/node/parsers.mdx | 2 +- .../sdk_ref/infrahub_sdk/node/property.mdx | 2 +- .../infrahub_sdk/node/related_node.mdx | 38 +-- .../infrahub_sdk/node/relationship.mdx | 32 +- infrahub_sdk/ctl/commands/__init__.py | 3 + infrahub_sdk/ctl/commands/create.py | 64 ++++ infrahub_sdk/ctl/commands/delete.py | 46 +++ infrahub_sdk/ctl/commands/get.py | 75 +++++ infrahub_sdk/ctl/commands/schema.py | 132 +++++++++ infrahub_sdk/ctl/commands/update.py | 143 +++++++++ 
infrahub_sdk/ctl/enduser_cli.py | 19 ++ infrahub_sdk/ctl/enduser_commands.py | 54 ++++ infrahub_sdk/ctl/formatters/__init__.py | 76 +++++ infrahub_sdk/ctl/formatters/base.py | 165 +++++++++++ infrahub_sdk/ctl/formatters/csv.py | 91 ++++++ infrahub_sdk/ctl/formatters/json.py | 51 ++++ infrahub_sdk/ctl/formatters/table.py | 113 +++++++ infrahub_sdk/ctl/formatters/yaml.py | 132 +++++++++ infrahub_sdk/ctl/parsers.py | 86 ++++++ pyproject.toml | 1 + .../checklists/requirements.md | 36 +++ .../contracts/cli-commands.md | 61 ++++ specs/001-end-user-cli/data-model.md | 48 +++ specs/001-end-user-cli/plan.md | 113 +++++++ specs/001-end-user-cli/quickstart.md | 116 ++++++++ specs/001-end-user-cli/research.md | 104 +++++++ specs/001-end-user-cli/spec.md | 152 ++++++++++ specs/001-end-user-cli/tasks.md | 277 ++++++++++++++++++ tests/integration/test_enduser_cli.py | 237 +++++++++++++++ tests/unit/ctl/commands/__init__.py | 0 tests/unit/ctl/commands/test_create.py | 28 ++ tests/unit/ctl/commands/test_delete.py | 16 + tests/unit/ctl/commands/test_get.py | 113 +++++++ tests/unit/ctl/commands/test_schema.py | 23 ++ tests/unit/ctl/commands/test_update.py | 31 ++ tests/unit/ctl/formatters/__init__.py | 0 tests/unit/ctl/formatters/test_csv.py | 247 ++++++++++++++++ tests/unit/ctl/formatters/test_json.py | 201 +++++++++++++ tests/unit/ctl/formatters/test_table.py | 210 +++++++++++++ tests/unit/ctl/formatters/test_yaml.py | 206 +++++++++++++ tests/unit/ctl/test_parsers.py | 128 ++++++++ 45 files changed, 3804 insertions(+), 206 deletions(-) create mode 100644 infrahub_sdk/ctl/commands/__init__.py create mode 100644 infrahub_sdk/ctl/commands/create.py create mode 100644 infrahub_sdk/ctl/commands/delete.py create mode 100644 infrahub_sdk/ctl/commands/get.py create mode 100644 infrahub_sdk/ctl/commands/schema.py create mode 100644 infrahub_sdk/ctl/commands/update.py create mode 100644 infrahub_sdk/ctl/enduser_cli.py create mode 100644 infrahub_sdk/ctl/enduser_commands.py create mode 100644 
infrahub_sdk/ctl/formatters/__init__.py create mode 100644 infrahub_sdk/ctl/formatters/base.py create mode 100644 infrahub_sdk/ctl/formatters/csv.py create mode 100644 infrahub_sdk/ctl/formatters/json.py create mode 100644 infrahub_sdk/ctl/formatters/table.py create mode 100644 infrahub_sdk/ctl/formatters/yaml.py create mode 100644 infrahub_sdk/ctl/parsers.py create mode 100644 specs/001-end-user-cli/checklists/requirements.md create mode 100644 specs/001-end-user-cli/contracts/cli-commands.md create mode 100644 specs/001-end-user-cli/data-model.md create mode 100644 specs/001-end-user-cli/plan.md create mode 100644 specs/001-end-user-cli/quickstart.md create mode 100644 specs/001-end-user-cli/research.md create mode 100644 specs/001-end-user-cli/spec.md create mode 100644 specs/001-end-user-cli/tasks.md create mode 100644 tests/integration/test_enduser_cli.py create mode 100644 tests/unit/ctl/commands/__init__.py create mode 100644 tests/unit/ctl/commands/test_create.py create mode 100644 tests/unit/ctl/commands/test_delete.py create mode 100644 tests/unit/ctl/commands/test_get.py create mode 100644 tests/unit/ctl/commands/test_schema.py create mode 100644 tests/unit/ctl/commands/test_update.py create mode 100644 tests/unit/ctl/formatters/__init__.py create mode 100644 tests/unit/ctl/formatters/test_csv.py create mode 100644 tests/unit/ctl/formatters/test_json.py create mode 100644 tests/unit/ctl/formatters/test_table.py create mode 100644 tests/unit/ctl/formatters/test_yaml.py create mode 100644 tests/unit/ctl/test_parsers.py diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx index 7b47c99c..0b91cc46 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx @@ -7,13 +7,13 @@ sidebarTitle: client ## Classes -### `InfrahubClient` +### `InfrahubClient` GraphQL Client to interact with Infrahub. 
**Methods:** -#### `get` +#### `get` ```python get(self, kind: type[SchemaType], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType | None @@ -22,50 +22,50 @@ get(self, kind: type[SchemaType], raise_when_missing: Literal[False], at: Timest
Show 6 other overloads -#### `get` +#### `get` ```python get(self, kind: type[SchemaType], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType ``` -#### `get` +#### `get` ```python get(self, kind: type[SchemaType], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode | None ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: bool = ..., at: Timestamp 
| None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode ``` -#### `get` +#### `get` ```python get(self, kind: str | type[SchemaType], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNode | SchemaType | None ```
-#### `delete` +#### `delete` ```python delete(self, kind: str | type[SchemaType], id: str, branch: str | None = None) -> None ``` -#### `create` +#### `create` ```python create(self, kind: str | type[SchemaType], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNode | SchemaType @@ -74,20 +74,20 @@ create(self, kind: str | type[SchemaType], data: dict | None = None, branch: str
Show 2 other overloads -#### `create` +#### `create` ```python create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNode ``` -#### `create` +#### `create` ```python create(self, kind: type[SchemaType], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaType ```
-#### `get_version` +#### `get_version` ```python get_version(self) -> str @@ -95,7 +95,7 @@ get_version(self) -> str Return the Infrahub version. -#### `get_user` +#### `get_user` ```python get_user(self) -> dict @@ -103,7 +103,7 @@ get_user(self) -> dict Return user information -#### `get_user_permissions` +#### `get_user_permissions` ```python get_user_permissions(self) -> dict @@ -111,7 +111,7 @@ get_user_permissions(self) -> dict Return user permissions -#### `count` +#### `count` ```python count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int @@ -119,7 +119,7 @@ count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: s Return the number of nodes of a given kind. -#### `all` +#### `all` ```python all(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaType] @@ -128,13 +128,13 @@ all(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None
Show 2 other overloads -#### `all` +#### `all` ```python all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNode] ``` -#### `all` +#### `all` ```python all(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNode] | list[SchemaType] @@ -164,7 +164,7 @@ Retrieve all nodes of a given kind - list\[InfrahubNode]: List of Nodes
-#### `filters` +#### `filters` ```python filters(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaType] @@ -173,13 +173,13 @@ filters(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str |
Show 2 other overloads -#### `filters` +#### `filters` ```python filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNode] ``` -#### `filters` +#### `filters` ```python filters(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNode] | list[SchemaType] @@ -211,7 +211,7 @@ Retrieve nodes of a given kind based on provided filters. - list\[InfrahubNodeSync]: List of Nodes that match the given filters.
-#### `clone` +#### `clone` ```python clone(self, branch: str | None = None) -> InfrahubClient @@ -219,7 +219,7 @@ clone(self, branch: str | None = None) -> InfrahubClient Return a cloned version of the client using the same configuration -#### `execute_graphql` +#### `execute_graphql` ```python execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict @@ -247,37 +247,37 @@ If retry_on_failure is True, the query will retry until the server becomes reach - The GraphQL data payload (response["data"]). -#### `refresh_login` +#### `refresh_login` ```python refresh_login(self) -> None ``` -#### `login` +#### `login` ```python login(self, refresh: bool = False) -> None ``` -#### `query_gql_query` +#### `query_gql_query` ```python query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict ``` -#### `create_diff` +#### `create_diff` ```python create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str ``` -#### `get_diff_summary` +#### `get_diff_summary` ```python get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff] ``` -#### `get_diff_tree` +#### `get_diff_tree` ```python get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None @@ -287,7 +287,7 @@ Get 
complete diff tree with metadata and nodes. Returns None if no diff exists. -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType @@ -296,37 +296,37 @@ allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType],
Show 6 other overloads -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., 
branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None @@ -350,7 +350,7 @@ Returns: InfrahubNode: Node corresponding to the allocated resource.
-#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType @@ -359,37 +359,37 @@ allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], i
Show 6 other overloads -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., 
prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None @@ -414,25 +414,25 @@ Returns: InfrahubNode: Node corresponding to the allocated resource.
-#### `create_batch` +#### `create_batch` ```python create_batch(self, return_exceptions: bool = False) -> InfrahubBatch ``` -#### `get_list_repositories` +#### `get_list_repositories` ```python get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData] ``` -#### `repository_update_commit` +#### `repository_update_commit` ```python repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool ``` -#### `convert_object_type` +#### `convert_object_type` ```python convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNode @@ -443,11 +443,11 @@ and its values indicate how to fill in these fields. Any mandatory field not hav in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type for more information. -### `InfrahubClientSync` +### `InfrahubClientSync` **Methods:** -#### `get` +#### `get` ```python get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync | None @@ -456,50 +456,50 @@ get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[False], at: Ti
Show 6 other overloads -#### `get` +#### `get` ```python get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync ``` -#### `get` +#### `get` ```python get(self, kind: type[SchemaTypeSync], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync | None ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: 
bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync ``` -#### `get` +#### `get` ```python get(self, kind: str | type[SchemaTypeSync], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync | None ```
-#### `delete` +#### `delete` ```python delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = None) -> None ``` -#### `create` +#### `create` ```python create(self, kind: str | type[SchemaTypeSync], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync @@ -508,20 +508,20 @@ create(self, kind: str | type[SchemaTypeSync], data: dict | None = None, branch:
Show 2 other overloads -#### `create` +#### `create` ```python create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNodeSync ``` -#### `create` +#### `create` ```python create(self, kind: type[SchemaTypeSync], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaTypeSync ```
-#### `get_version` +#### `get_version` ```python get_version(self) -> str @@ -529,7 +529,7 @@ get_version(self) -> str Return the Infrahub version. -#### `get_user` +#### `get_user` ```python get_user(self) -> dict @@ -537,7 +537,7 @@ get_user(self) -> dict Return user information -#### `get_user_permissions` +#### `get_user_permissions` ```python get_user_permissions(self) -> dict @@ -545,7 +545,7 @@ get_user_permissions(self) -> dict Return user permissions -#### `clone` +#### `clone` ```python clone(self, branch: str | None = None) -> InfrahubClientSync @@ -553,7 +553,7 @@ clone(self, branch: str | None = None) -> InfrahubClientSync Return a cloned version of the client using the same configuration -#### `execute_graphql` +#### `execute_graphql` ```python execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict @@ -582,7 +582,7 @@ GraphQL errors always raise `GraphQLError`. Defaults to None. - The GraphQL data payload (`response["data"]`). -#### `count` +#### `count` ```python count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int @@ -590,7 +590,7 @@ count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: s Return the number of nodes of a given kind. -#### `all` +#### `all` ```python all(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) 
-> list[SchemaTypeSync] @@ -599,13 +599,13 @@ all(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str |
Show 2 other overloads -#### `all` +#### `all` ```python all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNodeSync] ``` -#### `all` +#### `all` ```python all(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNodeSync] | list[SchemaTypeSync] @@ -635,7 +635,7 @@ Retrieve all nodes of a given kind - list\[InfrahubNodeSync]: List of Nodes
-#### `filters` +#### `filters` ```python filters(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaTypeSync] @@ -644,13 +644,13 @@ filters(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: st
Show 2 other overloads -#### `filters` +#### `filters` ```python filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNodeSync] ``` -#### `filters` +#### `filters` ```python filters(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNodeSync] | list[SchemaTypeSync] @@ -682,7 +682,7 @@ Retrieve nodes of a given kind based on provided filters. - list\[InfrahubNodeSync]: List of Nodes that match the given filters.
-#### `create_batch` +#### `create_batch` ```python create_batch(self, return_exceptions: bool = False) -> InfrahubBatchSync @@ -693,31 +693,31 @@ Create a batch to execute multiple queries concurrently. Executing the batch will be performed using a thread pool, meaning it cannot guarantee the execution order. It is not recommended to use such batch to manipulate objects that depend on each others. -#### `get_list_repositories` +#### `get_list_repositories` ```python get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData] ``` -#### `query_gql_query` +#### `query_gql_query` ```python query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict ``` -#### `create_diff` +#### `create_diff` ```python create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str ``` -#### `get_diff_summary` +#### `get_diff_summary` ```python get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff] ``` -#### `get_diff_tree` +#### `get_diff_tree` ```python get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None @@ -727,7 +727,7 @@ Get complete diff tree with metadata and nodes. Returns None if no diff exists. 
-#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync @@ -736,37 +736,37 @@ allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTyp
Show 6 other overloads -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = 
..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None @@ -790,7 +790,7 @@ Returns: InfrahubNodeSync: Node corresponding to the allocated resource.
-#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync @@ -799,37 +799,37 @@ allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaType
Show 6 other overloads -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, 
kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None @@ -854,25 +854,25 @@ Returns: InfrahubNodeSync: Node corresponding to the allocated resource.
-#### `repository_update_commit` +#### `repository_update_commit` ```python repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool ``` -#### `refresh_login` +#### `refresh_login` ```python refresh_login(self) -> None ``` -#### `login` +#### `login` ```python login(self, refresh: bool = False) -> None ``` -#### `convert_object_type` +#### `convert_object_type` ```python convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNodeSync @@ -883,39 +883,39 @@ and its values indicate how to fill in these fields. Any mandatory field not hav in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type for more information. -### `ProcessRelationsNode` +### `ProcessRelationsNode` -### `ProxyConfig` +### `ProxyConfig` -### `ProxyConfigSync` +### `ProxyConfigSync` -### `ProcessRelationsNodeSync` +### `ProcessRelationsNodeSync` -### `BaseClient` +### `BaseClient` Base class for InfrahubClient and InfrahubClientSync **Methods:** -#### `request_context` +#### `request_context` ```python request_context(self) -> RequestContext | None ``` -#### `request_context` +#### `request_context` ```python request_context(self, request_context: RequestContext) -> None ``` -#### `start_tracking` +#### `start_tracking` ```python start_tracking(self, identifier: str | None = None, params: dict[str, Any] | None = None, delete_unused_nodes: bool = False, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> Self ``` -#### `set_context_properties` +#### `set_context_properties` ```python set_context_properties(self, identifier: str, params: dict[str, str] | None = None, delete_unused_nodes: bool = True, reset: bool = True, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> None @@ 
-923,19 +923,19 @@ set_context_properties(self, identifier: str, params: dict[str, str] | None = No ## Functions -### `handle_relogin` +### `handle_relogin` ```python handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]) -> Callable[..., Coroutine[Any, Any, httpx.Response]] ``` -### `handle_relogin_sync` +### `handle_relogin_sync` ```python handle_relogin_sync(func: Callable[..., httpx.Response]) -> Callable[..., httpx.Response] ``` -### `raise_for_error_deprecation_warning` +### `raise_for_error_deprecation_warning` ```python raise_for_error_deprecation_warning(value: bool | None) -> None diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx index d08c7fc5..1743230f 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx @@ -7,25 +7,25 @@ sidebarTitle: attribute ## Classes -### `Attribute` +### `Attribute` Represents an attribute of a Node, including its schema, value, and properties. **Methods:** -#### `value` +#### `value` ```python value(self) -> Any ``` -#### `value` +#### `value` ```python value(self, value: Any) -> None ``` -#### `is_from_pool_attribute` +#### `is_from_pool_attribute` ```python is_from_pool_attribute(self) -> bool diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx index 6175236f..5306c66f 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx @@ -7,10 +7,10 @@ sidebarTitle: metadata ## Classes -### `NodeMetadata` +### `NodeMetadata` Represents metadata about a node (created_at, created_by, updated_at, updated_by). -### `RelationshipMetadata` +### `RelationshipMetadata` Represents metadata about a relationship edge (updated_at, updated_by). 
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx index e23120dd..c3bc30c6 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx @@ -7,37 +7,37 @@ sidebarTitle: node ## Classes -### `InfrahubNode` +### `InfrahubNode` Represents a Infrahub node in an asynchronous context. **Methods:** -#### `from_graphql` +#### `from_graphql` ```python from_graphql(cls, client: InfrahubClient, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self ``` -#### `generate` +#### `generate` ```python generate(self, nodes: list[str] | None = None) -> None ``` -#### `artifact_generate` +#### `artifact_generate` ```python artifact_generate(self, name: str) -> None ``` -#### `artifact_fetch` +#### `artifact_fetch` ```python artifact_fetch(self, name: str) -> str | dict[str, Any] ``` -#### `download_file` +#### `download_file` ```python download_file(self, dest: Path | None = None) -> bytes | int @@ -75,25 +75,25 @@ The node must have been saved (have an id) before calling this method. 
>>> bytes_written = await contract.download_file(dest=Path("/tmp/contract.pdf")) ``` -#### `delete` +#### `delete` ```python delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `save` +#### `save` ```python save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `generate_query_data` +#### `generate_query_data` ```python generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict] ``` -#### `generate_query_data_node` +#### `generate_query_data_node` ```python generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict] @@ -115,31 +115,31 @@ Generate the node part of a GraphQL Query with attributes and nodes. 
- dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format -#### `add_relationships` +#### `add_relationships` ```python add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None ``` -#### `remove_relationships` +#### `remove_relationships` ```python remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None ``` -#### `create` +#### `create` ```python create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `update` +#### `update` ```python update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `get_pool_allocated_resources` +#### `get_pool_allocated_resources` ```python get_pool_allocated_resources(self, resource: InfrahubNode) -> list[InfrahubNode] @@ -155,7 +155,7 @@ Fetch all nodes that were allocated for the pool and a given resource. - list\[InfrahubNode]: The allocated nodes. -#### `get_pool_resources_utilization` +#### `get_pool_resources_utilization` ```python get_pool_resources_utilization(self) -> list[dict[str, Any]] @@ -167,7 +167,7 @@ Fetch the utilization of each resource for the pool. - list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool. -#### `get_flat_value` +#### `get_flat_value` ```python get_flat_value(self, key: str, separator: str = '__') -> Any @@ -180,7 +180,7 @@ Query recursively a value defined in a flat notation (string), on a hierarchy of name__value module.object.value -#### `extract` +#### `extract` ```python extract(self, params: dict[str, str]) -> dict[str, Any] @@ -188,37 +188,37 @@ extract(self, params: dict[str, str]) -> dict[str, Any] Extract some data points defined in a flat notation. -### `InfrahubNodeSync` +### `InfrahubNodeSync` Represents a Infrahub node in a synchronous context. 
**Methods:** -#### `from_graphql` +#### `from_graphql` ```python from_graphql(cls, client: InfrahubClientSync, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self ``` -#### `generate` +#### `generate` ```python generate(self, nodes: list[str] | None = None) -> None ``` -#### `artifact_generate` +#### `artifact_generate` ```python artifact_generate(self, name: str) -> None ``` -#### `artifact_fetch` +#### `artifact_fetch` ```python artifact_fetch(self, name: str) -> str | dict[str, Any] ``` -#### `download_file` +#### `download_file` ```python download_file(self, dest: Path | None = None) -> bytes | int @@ -256,25 +256,25 @@ The node must have been saved (have an id) before calling this method. >>> bytes_written = contract.download_file(dest=Path("/tmp/contract.pdf")) ``` -#### `delete` +#### `delete` ```python delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `save` +#### `save` ```python save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `generate_query_data` +#### `generate_query_data` ```python generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict] ``` -#### `generate_query_data_node` +#### `generate_query_data_node` ```python generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict] @@ -296,31 +296,31 
@@ Generate the node part of a GraphQL Query with attributes and nodes. - dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format -#### `add_relationships` +#### `add_relationships` ```python add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None ``` -#### `remove_relationships` +#### `remove_relationships` ```python remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None ``` -#### `create` +#### `create` ```python create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `update` +#### `update` ```python update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `get_pool_allocated_resources` +#### `get_pool_allocated_resources` ```python get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[InfrahubNodeSync] @@ -336,7 +336,7 @@ Fetch all nodes that were allocated for the pool and a given resource. - list\[InfrahubNodeSync]: The allocated nodes. -#### `get_pool_resources_utilization` +#### `get_pool_resources_utilization` ```python get_pool_resources_utilization(self) -> list[dict[str, Any]] @@ -348,7 +348,7 @@ Fetch the utilization of each resource for the pool. - list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool. -#### `get_flat_value` +#### `get_flat_value` ```python get_flat_value(self, key: str, separator: str = '__') -> Any @@ -361,7 +361,7 @@ Query recursively a value defined in a flat notation (string), on a hierarchy of name__value module.object.value -#### `extract` +#### `extract` ```python extract(self, params: dict[str, str]) -> dict[str, Any] @@ -369,49 +369,49 @@ extract(self, params: dict[str, str]) -> dict[str, Any] Extract some data points defined in a flat notation. 
-### `InfrahubNodeBase` +### `InfrahubNodeBase` Base class for InfrahubNode and InfrahubNodeSync **Methods:** -#### `get_branch` +#### `get_branch` ```python get_branch(self) -> str ``` -#### `get_path_value` +#### `get_path_value` ```python get_path_value(self, path: str) -> Any ``` -#### `get_human_friendly_id` +#### `get_human_friendly_id` ```python get_human_friendly_id(self) -> list[str] | None ``` -#### `get_human_friendly_id_as_string` +#### `get_human_friendly_id_as_string` ```python get_human_friendly_id_as_string(self, include_kind: bool = False) -> str | None ``` -#### `hfid` +#### `hfid` ```python hfid(self) -> list[str] | None ``` -#### `hfid_str` +#### `hfid_str` ```python hfid_str(self) -> str | None ``` -#### `get_node_metadata` +#### `get_node_metadata` ```python get_node_metadata(self) -> NodeMetadata | None @@ -419,37 +419,37 @@ get_node_metadata(self) -> NodeMetadata | None Returns the node metadata (created_at, created_by, updated_at, updated_by) if fetched. -#### `get_kind` +#### `get_kind` ```python get_kind(self) -> str ``` -#### `get_all_kinds` +#### `get_all_kinds` ```python get_all_kinds(self) -> list[str] ``` -#### `is_ip_prefix` +#### `is_ip_prefix` ```python is_ip_prefix(self) -> bool ``` -#### `is_ip_address` +#### `is_ip_address` ```python is_ip_address(self) -> bool ``` -#### `is_resource_pool` +#### `is_resource_pool` ```python is_resource_pool(self) -> bool ``` -#### `is_file_object` +#### `is_file_object` ```python is_file_object(self) -> bool @@ -457,7 +457,7 @@ is_file_object(self) -> bool Check if this node inherits from CoreFileObject and supports file uploads. -#### `upload_from_path` +#### `upload_from_path` ```python upload_from_path(self, path: Path) -> None @@ -475,7 +475,7 @@ The file will be streamed during upload, avoiding loading the entire file into m - `FeatureNotSupportedError`: If this node doesn't inherit from CoreFileObject. 
-#### `upload_from_bytes` +#### `upload_from_bytes` ```python upload_from_bytes(self, content: bytes | BinaryIO, name: str) -> None @@ -505,7 +505,7 @@ Using BinaryIO is recommended for large content to stream during upload. ... node.upload_from_bytes(content=f, name="file.bin") ``` -#### `clear_file` +#### `clear_file` ```python clear_file(self) -> None @@ -513,13 +513,13 @@ clear_file(self) -> None Clear any pending file content. -#### `get_raw_graphql_data` +#### `get_raw_graphql_data` ```python get_raw_graphql_data(self) -> dict | None ``` -#### `generate_query_data_init` +#### `generate_query_data_init` ```python generate_query_data_init(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, partial_match: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict] diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx index f70c6788..74d79816 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx @@ -7,7 +7,7 @@ sidebarTitle: parsers ## Functions -### `parse_human_friendly_id` +### `parse_human_friendly_id` ```python parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]] diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx index a7400483..d4ace16d 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx @@ -7,6 +7,6 @@ sidebarTitle: property ## Classes -### `NodeProperty` +### `NodeProperty` Represents a property of a node, typically used for metadata like display labels. 
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx index edc1112c..8e03ce0f 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx @@ -7,61 +7,61 @@ sidebarTitle: related_node ## Classes -### `RelatedNodeBase` +### `RelatedNodeBase` Base class for representing a related node in a relationship. **Methods:** -#### `id` +#### `id` ```python id(self) -> str | None ``` -#### `hfid` +#### `hfid` ```python hfid(self) -> list[Any] | None ``` -#### `hfid_str` +#### `hfid_str` ```python hfid_str(self) -> str | None ``` -#### `is_resource_pool` +#### `is_resource_pool` ```python is_resource_pool(self) -> bool ``` -#### `initialized` +#### `initialized` ```python initialized(self) -> bool ``` -#### `display_label` +#### `display_label` ```python display_label(self) -> str | None ``` -#### `typename` +#### `typename` ```python typename(self) -> str | None ``` -#### `kind` +#### `kind` ```python kind(self) -> str | None ``` -#### `is_from_profile` +#### `is_from_profile` ```python is_from_profile(self) -> bool @@ -69,7 +69,7 @@ is_from_profile(self) -> bool Return whether this relationship was set from a profile. Done by checking if the source is of a profile kind. -#### `get_relationship_metadata` +#### `get_relationship_metadata` ```python get_relationship_metadata(self) -> RelationshipMetadata | None @@ -77,49 +77,49 @@ get_relationship_metadata(self) -> RelationshipMetadata | None Returns the relationship metadata (updated_at, updated_by) if fetched. -### `RelatedNode` +### `RelatedNode` Represents a RelatedNodeBase in an asynchronous context. 
**Methods:** -#### `fetch` +#### `fetch` ```python fetch(self, timeout: int | None = None) -> None ``` -#### `peer` +#### `peer` ```python peer(self) -> InfrahubNode ``` -#### `get` +#### `get` ```python get(self) -> InfrahubNode ``` -### `RelatedNodeSync` +### `RelatedNodeSync` Represents a related node in a synchronous context. **Methods:** -#### `fetch` +#### `fetch` ```python fetch(self, timeout: int | None = None) -> None ``` -#### `peer` +#### `peer` ```python peer(self) -> InfrahubNodeSync ``` -#### `get` +#### `get` ```python get(self) -> InfrahubNodeSync diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx index 567b7c8d..fec5df0e 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx @@ -7,37 +7,37 @@ sidebarTitle: relationship ## Classes -### `RelationshipManagerBase` +### `RelationshipManagerBase` Base class for RelationshipManager and RelationshipManagerSync **Methods:** -#### `peer_ids` +#### `peer_ids` ```python peer_ids(self) -> list[str] ``` -#### `peer_hfids` +#### `peer_hfids` ```python peer_hfids(self) -> list[list[Any]] ``` -#### `peer_hfids_str` +#### `peer_hfids_str` ```python peer_hfids_str(self) -> list[str] ``` -#### `has_update` +#### `has_update` ```python has_update(self) -> bool ``` -#### `is_from_profile` +#### `is_from_profile` ```python is_from_profile(self) -> bool @@ -45,19 +45,19 @@ is_from_profile(self) -> bool Return whether this relationship was set from a profile. All its peers must be from a profile. -### `RelationshipManager` +### `RelationshipManager` Manages relationships of a node in an asynchronous context. 
**Methods:** -#### `fetch` +#### `fetch` ```python fetch(self) -> None ``` -#### `add` +#### `add` ```python add(self, data: str | RelatedNode | dict) -> None @@ -65,7 +65,7 @@ add(self, data: str | RelatedNode | dict) -> None Add a new peer to this relationship. -#### `extend` +#### `extend` ```python extend(self, data: Iterable[str | RelatedNode | dict]) -> None @@ -73,25 +73,25 @@ extend(self, data: Iterable[str | RelatedNode | dict]) -> None Add new peers to this relationship. -#### `remove` +#### `remove` ```python remove(self, data: str | RelatedNode | dict) -> None ``` -### `RelationshipManagerSync` +### `RelationshipManagerSync` Manages relationships of a node in a synchronous context. **Methods:** -#### `fetch` +#### `fetch` ```python fetch(self) -> None ``` -#### `add` +#### `add` ```python add(self, data: str | RelatedNodeSync | dict) -> None @@ -99,7 +99,7 @@ add(self, data: str | RelatedNodeSync | dict) -> None Add a new peer to this relationship. -#### `extend` +#### `extend` ```python extend(self, data: Iterable[str | RelatedNodeSync | dict]) -> None @@ -107,7 +107,7 @@ extend(self, data: Iterable[str | RelatedNodeSync | dict]) -> None Add new peers to this relationship. -#### `remove` +#### `remove` ```python remove(self, data: str | RelatedNodeSync | dict) -> None diff --git a/infrahub_sdk/ctl/commands/__init__.py b/infrahub_sdk/ctl/commands/__init__.py new file mode 100644 index 00000000..9866927c --- /dev/null +++ b/infrahub_sdk/ctl/commands/__init__.py @@ -0,0 +1,3 @@ +"""Command modules for the ``infrahub`` end-user CLI.""" + +from __future__ import annotations diff --git a/infrahub_sdk/ctl/commands/create.py b/infrahub_sdk/ctl/commands/create.py new file mode 100644 index 00000000..8e9f8034 --- /dev/null +++ b/infrahub_sdk/ctl/commands/create.py @@ -0,0 +1,64 @@ +"""Command implementation for ``infrahub create``. 
@catch_exception(console=console)
async def create_command(
    kind: str = typer.Argument(..., help="Infrahub schema kind to create"),
    set_args: list[str] | None = typer.Option(None, "--set", help="Field value in key=value format"),
    file: Path | None = typer.Option(None, "--file", "-f", help="JSON or YAML file with object data"),
    branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"),
    _: str = CONFIG_PARAM,
) -> None:
    """Create a new object in Infrahub.

    Two mutually exclusive input modes are supported: inline field
    assignment via repeatable ``--set key=value`` options, or bulk
    creation from a JSON/YAML object file via ``--file``.

    Args:
        kind: The Infrahub schema kind to create (e.g. ``InfraDevice``).
        set_args: Repeatable ``key=value`` pairs for inline field assignment.
        file: Path to a JSON or YAML object file.
        branch: Target branch for the operation.
        _: Configuration file parameter (handled by callback).
    """
    # Exactly one of the two input modes must be selected.
    if set_args and file:
        raise typer.BadParameter("--set and --file are mutually exclusive. Use one or the other.")
    if not set_args and not file:
        raise typer.BadParameter("Provide either --set key=value pairs or --file .")

    client = initialize_client(branch=branch)

    if file is None:
        # Inline mode: parse and validate the key=value pairs against the
        # live schema before creating and saving the node.
        fields = parse_set_args(set_args)  # type: ignore[arg-type]
        node_schema = await client.schema.get(kind=kind, branch=branch)
        validate_set_fields(fields, node_schema.attribute_names, node_schema.relationship_names)
        node = await client.create(kind=kind, data=fields, branch=branch)
        await node.save(allow_upsert=True)
        console.print(f"[green]Created {kind} '{node.display_label}' (id: {node.id})")
        return

    # File mode: the object file carries its own kind and data entries, so
    # the ``kind`` argument is not consulted here.
    for loaded in ObjectFile.load_from_disk(paths=[file]):
        await loaded.validate_format(client=client, branch=branch)
        await loaded.process(client=client, branch=branch)
        object_count = len(loaded.spec.data)
        console.print(f"[green]Created {object_count} objects of kind {loaded.spec.kind}")
@catch_exception(console=console)
async def delete_command(
    kind: str = typer.Argument(..., help="Infrahub schema kind"),
    identifier: str = typer.Argument(..., help="Object ID or display name"),
    yes: bool = typer.Option(False, "--yes", "-y", help="Skip confirmation prompt"),
    branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"),
    _: str = CONFIG_PARAM,
) -> None:
    """Delete an Infrahub object.

    Looks up the object named by *identifier* and removes it from the
    database. Unless ``--yes`` is provided, the user is prompted for
    confirmation before the deletion is executed.

    Args:
        kind: Infrahub schema kind (e.g. ``InfraDevice``).
        identifier: UUID or display name of the object to delete.
        yes: When ``True``, skip the interactive confirmation prompt.
        branch: Target branch name.
        _: Configuration file path (handled by callback).
    """
    client = initialize_client(branch=branch)
    # NOTE(review): identifier is documented as "ID or display name" but is
    # passed as id= -- confirm the SDK's get() resolves display names too.
    target = await client.get(kind=kind, id=identifier)

    if not yes:
        # abort=True raises typer.Abort when the user declines.
        typer.confirm(f"Delete {kind} '{target.display_label}'?", abort=True)

    await target.delete()
    console.print(f"[green]Deleted {kind} '{target.display_label}' (id: {target.id})")
+""" + +from __future__ import annotations + +from typing import Any + +import typer +from rich.console import Console + +from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.formatters import OutputFormat, detect_output_format, get_formatter +from infrahub_sdk.ctl.parameters import CONFIG_PARAM +from infrahub_sdk.ctl.parsers import parse_filter_args +from infrahub_sdk.ctl.utils import catch_exception + +console = Console() + + +@catch_exception(console=console) +async def get_command( + kind: str = typer.Argument(..., help="Infrahub schema kind to query"), + identifier: str | None = typer.Argument(None, help="Object ID or display name for detail view"), + filter_args: list[str] | None = typer.Option(None, "--filter", help="Filter in attr__value=x format"), + output: OutputFormat | None = typer.Option(None, "--output", "-o", help="Output format"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + limit: int | None = typer.Option(None, "--limit", help="Maximum results"), + offset: int | None = typer.Option(None, "--offset", help="Skip first N results"), + _: str = CONFIG_PARAM, +) -> None: + """Query and display Infrahub objects. + + When *identifier* is omitted, lists all objects of the given *kind*, + optionally filtered by ``--filter`` arguments. When *identifier* is + provided, displays a single object in detail view. + + Args: + kind: Infrahub schema kind (e.g. ``InfraDevice``). + identifier: UUID or display name for single-object lookup. + filter_args: Repeatable filters in ``attr__value=x`` format. + output: Explicit output format; auto-detected when omitted. + branch: Target branch name. + limit: Maximum number of results to return. + offset: Number of results to skip (pagination). + _: Configuration file path (handled by callback). 
+ """ + client = initialize_client(branch=branch) + schema = await client.schema.get(kind=kind, branch=branch) + + fmt = output or detect_output_format() + formatter = get_formatter(fmt) + + if identifier is not None: + node = await client.get(kind=kind, id=identifier) + result = formatter.format_detail(node, schema) + else: + filters: dict[str, Any] = parse_filter_args(filter_args or []) + nodes = await client.filters( + kind=kind, + **filters, + offset=offset, + limit=limit, + prefetch_relationships=True, + ) + result = formatter.format_list(nodes, schema) + + if fmt == OutputFormat.TABLE: + console.print(result, highlight=False) + else: + typer.echo(result) diff --git a/infrahub_sdk/ctl/commands/schema.py b/infrahub_sdk/ctl/commands/schema.py new file mode 100644 index 00000000..86990b1d --- /dev/null +++ b/infrahub_sdk/ctl/commands/schema.py @@ -0,0 +1,132 @@ +"""``infrahub schema`` subcommand group -- explore the Infrahub schema. + +Provides ``list`` and ``show`` subcommands for inspecting schema kinds +and their attributes and relationships. +""" + +from __future__ import annotations + +import typer +from rich.console import Console +from rich.table import Table + +from infrahub_sdk.async_typer import AsyncTyper +from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.parameters import CONFIG_PARAM +from infrahub_sdk.ctl.utils import catch_exception +from infrahub_sdk.schema import NodeSchemaAPI + +app = AsyncTyper() +console = Console() + + +@app.callback() +def callback() -> None: + """Explore the Infrahub schema.""" + + +@app.command(name="list") +@catch_exception(console=console) +async def schema_list( + filter_text: str | None = typer.Option(None, "--filter", help="Filter kinds by name substring"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """List all available schema kinds. 
@app.command(name="list")
@catch_exception(console=console)
async def schema_list(
    filter_text: str | None = typer.Option(None, "--filter", help="Filter kinds by name substring"),
    branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"),
    _: str = CONFIG_PARAM,
) -> None:
    """List all available schema kinds.

    Fetches the full schema from the Infrahub instance and displays a
    table of ``NodeSchemaAPI`` entries. Use ``--filter`` to narrow results
    by a case-insensitive substring match on the kind name.

    Args:
        filter_text: Optional substring to filter kind names.
        branch: Target branch name.
        _: Configuration file path (handled by callback).
    """
    client = initialize_client(branch=branch)
    all_schemas = await client.schema.all(branch=branch)

    # Keep only node schemas, optionally narrowed by a case-insensitive
    # substring match, then sort by kind for stable output.
    needle = filter_text.lower() if filter_text else None
    selected = sorted(
        (
            entry
            for entry in all_schemas.values()
            if isinstance(entry, NodeSchemaAPI) and (needle is None or needle in entry.kind.lower())
        ),
        key=lambda entry: entry.kind,
    )

    table = Table(title="Schema Kinds")
    for heading in ("Namespace", "Name", "Kind", "Description"):
        table.add_column(heading)

    for entry in selected:
        table.add_row(entry.namespace, entry.name, entry.kind, entry.description or "")

    console.print(table)
+ """ + client = initialize_client(branch=branch) + schema = await client.schema.get(kind=kind, branch=branch) + + console.print(f"\n[bold]{schema.kind}[/bold]") + if schema.description: + console.print(f" {schema.description}") + console.print(f" Namespace: {schema.namespace}") + console.print(f" Display Labels: {schema.display_labels or 'N/A'}") + console.print(f" Human Friendly ID: {schema.human_friendly_id or 'N/A'}") + + if schema.attributes: + attr_table = Table(title="Attributes") + attr_table.add_column("Name") + attr_table.add_column("Type") + attr_table.add_column("Required") + attr_table.add_column("Default") + attr_table.add_column("Description") + + for attr in schema.attributes: + attr_table.add_row( + attr.name, + str(attr.kind), + "Yes" if not attr.optional else "No", + str(attr.default_value) if attr.default_value is not None else "", + attr.description or "", + ) + console.print(attr_table) + + if schema.relationships: + rel_table = Table(title="Relationships") + rel_table.add_column("Name") + rel_table.add_column("Peer") + rel_table.add_column("Cardinality") + rel_table.add_column("Optional") + + for rel in schema.relationships: + rel_table.add_row( + rel.name, + rel.peer, + rel.cardinality, + "Yes" if rel.optional else "No", + ) + console.print(rel_table) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py new file mode 100644 index 00000000..4f9277b9 --- /dev/null +++ b/infrahub_sdk/ctl/commands/update.py @@ -0,0 +1,143 @@ +"""Update command for the ``infrahub`` end-user CLI. + +Fetches an existing object by kind and identifier, applies field changes +supplied via ``--set`` flags or a ``--file`` path, and saves the result. 
+""" + +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING + +import typer # pyright: ignore[reportMissingImports] +from rich.console import Console # pyright: ignore[reportMissingImports] + +from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.parameters import CONFIG_PARAM +from infrahub_sdk.ctl.parsers import parse_set_args, validate_set_fields +from infrahub_sdk.ctl.utils import catch_exception +from infrahub_sdk.spec.object import ObjectFile + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + +console = Console() + + +@catch_exception(console=console) +async def update_command( + kind: str = typer.Argument(..., help="Infrahub schema kind"), + identifier: str = typer.Argument(..., help="Object ID or display name"), + set_args: list[str] | None = typer.Option(None, "--set", help="Field value in key=value format"), + file: Path | None = typer.Option(None, "--file", "-f", help="JSON or YAML file with update data"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """Update an existing object in Infrahub. + + Fetches the object identified by KIND and IDENTIFIER, applies the + requested changes, and saves the updated object back to the server. + + Changes can be provided either as repeatable ``--set key=value`` + flags or via a ``--file`` pointing to a YAML/JSON object file. + The two modes are mutually exclusive. + + Args: + kind: Infrahub schema kind (e.g. ``InfraDevice``). + identifier: Object UUID or human-readable display name. + set_args: Repeatable key=value pairs for inline field updates. + file: Path to a YAML or JSON object file with update data. + branch: Target branch for the operation. 
+ """ + if set_args and file: + raise typer.BadParameter("--set and --file are mutually exclusive.") + + if not set_args and not file: + raise typer.BadParameter("Provide either --set or --file to specify update data.") + + client = initialize_client(branch=branch) + + if set_args: + await _update_with_set_args( + client=client, + kind=kind, + identifier=identifier, + set_args=set_args, + branch=branch, + ) + elif file: + await _update_with_file( + client=client, + file=file, + branch=branch, + ) + + +async def _update_with_set_args( + client: InfrahubClient, + kind: str, + identifier: str, + set_args: list[str], + branch: str | None, +) -> None: + """Apply inline --set key=value updates to an existing object. + + Parses the set arguments, validates them against the schema, fetches + the target node, applies changes, and saves. + + Args: + client: Initialised async Infrahub client. + kind: Infrahub schema kind. + identifier: Object UUID or display name. + set_args: List of "key=value" strings. + branch: Optional target branch. 
+ """ + data = parse_set_args(set_args) + schema = await client.schema.get(kind=kind, branch=branch) + validate_set_fields(data, schema.attribute_names, schema.relationship_names) + + node = await client.get(kind=kind, id=identifier) + + changes: list[tuple[str, object, str]] = [] + for key, new_value in data.items(): + if key in schema.attribute_names: + attr = getattr(node, key) + old_value = attr.value + attr.value = new_value + changes.append((key, old_value, new_value)) + elif key in schema.relationship_names: + rel = getattr(node, key) + old_id = getattr(rel, "id", None) + await rel.fetch() # type: ignore[union-attr] + old_display = getattr(rel, "display_label", old_id) + setattr(node, key, {"id": new_value}) + changes.append((key, old_display, new_value)) + + await node.save() + + console.print(f"[green]Updated {kind} '{identifier}' successfully.") + for field_name, old_val, new_val in changes: + console.print(f" {field_name}: {old_val} -> {new_val}") + + +async def _update_with_file( + client: InfrahubClient, + file: Path, + branch: str | None, +) -> None: + """Apply updates from a YAML/JSON object file. + + Loads the file, validates its format against the server schema, + then processes it to apply the changes. + + Args: + client: Initialised async Infrahub client. + file: Path to the YAML or JSON object file. + branch: Optional target branch. + """ + files = ObjectFile.load_from_disk(paths=[file]) + for obj_file in files: + await obj_file.validate_format(client=client, branch=branch) + await obj_file.process(client=client, branch=branch) + + console.print(f"[green]Processed update from file '{file}' successfully.") diff --git a/infrahub_sdk/ctl/enduser_cli.py b/infrahub_sdk/ctl/enduser_cli.py new file mode 100644 index 00000000..8f617520 --- /dev/null +++ b/infrahub_sdk/ctl/enduser_cli.py @@ -0,0 +1,19 @@ +"""Entry point for the ``infrahub`` end-user CLI. 
app = AsyncTyper(pretty_exceptions_show_locals=False)


def _version_callback(value: bool) -> None:
    """Print the SDK version and exit.

    Args:
        value: Whether the ``--version`` flag was passed.

    Raises:
        typer.Exit: Always raised after printing the version.
    """
    if not value:
        return
    typer.echo(f"infrahub v{sdk_version}")
    raise typer.Exit


@app.callback(invoke_without_command=True)
def main(
    version: bool | None = typer.Option(
        None,
        "--version",
        "-V",
        help="Show the SDK version and exit.",
        callback=_version_callback,
        is_eager=True,
    ),
) -> None:
    """Infrahub CLI -- interact with an Infrahub instance from the command line."""


# Register the top-level CRUD commands, then attach the ``schema`` group.
for _name, _command in (
    ("get", get_command),
    ("create", create_command),
    ("update", update_command),
    ("delete", delete_command),
):
    app.command(name=_name)(_command)
app.add_typer(schema_app, name="schema")
def detect_output_format() -> OutputFormat:
    """Auto-detect output format based on whether stdout is a TTY.

    Returns:
        ``OutputFormat.TABLE`` when stdout is connected to a terminal,
        ``OutputFormat.JSON`` otherwise (e.g. when piped).
    """
    if sys.stdout.isatty():
        return OutputFormat.TABLE
    return OutputFormat.JSON


def get_formatter(output_format: OutputFormat) -> BaseFormatter:
    """Return the appropriate formatter for the given output format.

    Args:
        output_format: The desired output format.

    Returns:
        A ``BaseFormatter`` subclass instance matching *output_format*.

    Raises:
        ValueError: If *output_format* is not a recognised format.
    """
    registry: dict[OutputFormat, type[BaseFormatter]] = {
        OutputFormat.TABLE: TableFormatter,
        OutputFormat.JSON: JsonFormatter,
        OutputFormat.CSV: CsvFormatter,
        OutputFormat.YAML: YamlFormatter,
    }

    formatter_class = registry.get(output_format)
    if formatter_class is None:
        # Defensive: unreachable while the registry covers every enum member.
        msg = f"Unsupported output format: {output_format}"
        raise ValueError(msg)

    return formatter_class()
+ + def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: + """Format a single node's detail view. + + Args: + node: The InfrahubNode to format. + schema: Schema definition for the node kind. + + Returns: + Formatted string with full node details. + """ + ... + + +def _extract_relationship_value( + node: InfrahubNode, + rel_name: str, + cardinality: str, +) -> str: + """Extract a display value from a relationship on a node. + + Args: + node: The node containing the relationship. + rel_name: Name of the relationship attribute. + cardinality: Either "one" or "many". + + Returns: + Display string for the relationship value. + """ + rel = getattr(node, rel_name, None) + if rel is None: + return "" + + if cardinality == "one": + return rel.display_label or rel.id or "" + + # cardinality == "many": RelationshipManager with .peers + peers = getattr(rel, "peers", []) + labels = [p.display_label or p.id or "" for p in peers] + return ", ".join(labels) + + +def extract_node_data( + node: InfrahubNode, + schema: MainSchemaTypesAPI, +) -> dict[str, Any]: + """Extract a flat dict of field names to display values from a node. + + Handles both attributes and relationships. Attribute values of None + are converted to empty strings. Relationship values are rendered as + display labels. + + Args: + node: The InfrahubNode to extract data from. + schema: Schema definition describing attributes and relationships. + + Returns: + Dict mapping field names to their string display values. 
+ """ + data: dict[str, Any] = {} + + for attr_name in schema.attribute_names: + attr = getattr(node, attr_name, None) + value = attr.value if attr is not None else None + data[attr_name] = value if value is not None else "" + + for rel_name in schema.relationship_names: + rel_schema = schema.get_relationship(rel_name) + data[rel_name] = _extract_relationship_value(node, rel_name, rel_schema.cardinality) + + return data + + +def extract_node_detail( + node: InfrahubNode, + schema: MainSchemaTypesAPI, +) -> dict[str, Any]: + """Extract a rich detail dict from a node including metadata. + + Similar to extract_node_data but includes the node ID, display label, + and schema kind as additional metadata fields. + + Args: + node: The InfrahubNode to extract data from. + schema: Schema definition describing attributes and relationships. + + Returns: + Dict with metadata fields (id, display_label, kind) followed + by attribute and relationship values. + """ + detail: dict[str, Any] = { + "id": node.id or "", + "display_label": node.display_label or "", + "kind": schema.kind, + } + + for attr_name in schema.attribute_names: + attr = getattr(node, attr_name, None) + if attr is not None: + detail[attr_name] = { + "value": attr.value if attr.value is not None else "", + } + else: + detail[attr_name] = {"value": ""} + + for rel_name in schema.relationship_names: + rel_schema = schema.get_relationship(rel_name) + rel = getattr(node, rel_name, None) + + if rel_schema.cardinality == "one": + if rel is not None: + detail[rel_name] = { + "display_label": rel.display_label or "", + "id": rel.id or "", + "cardinality": "one", + } + else: + detail[rel_name] = { + "display_label": "", + "id": "", + "cardinality": "one", + } + else: + peers = getattr(rel, "peers", []) if rel is not None else [] + detail[rel_name] = { + "peers": [ + { + "display_label": p.display_label or "", + "id": p.id or "", + } + for p in peers + ], + "cardinality": "many", + } + + return detail diff --git 
class CsvFormatter:
    """Formats InfrahubNode data as CSV strings.

    Uses stdlib csv module for proper escaping and quoting of values.
    """

    def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> str:
        """Format a list of nodes as CSV with a header row.

        Columns correspond to schema attribute and relationship names.
        Each node produces one data row.

        Args:
            nodes: List of InfrahubNode objects to format.
            schema: Schema definition for the node kind.

        Returns:
            CSV string with header and data rows.
        """
        header = schema.attribute_names + schema.relationship_names
        buffer = io.StringIO()
        writer = csv.writer(buffer)

        writer.writerow(header)
        for node in nodes:
            values = extract_node_data(node, schema)
            writer.writerow([str(values.get(column, "")) for column in header])

        return buffer.getvalue()

    def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str:
        """Format a single node as a two-column CSV (field, value).

        Includes metadata fields (id, display_label, kind) followed
        by all attributes and relationships.

        Args:
            node: The InfrahubNode to format.
            schema: Schema definition for the node kind.

        Returns:
            CSV string with field/value columns.
        """
        detail = extract_node_detail(node, schema)
        buffer = io.StringIO()
        writer = csv.writer(buffer)

        writer.writerow(["field", "value"])

        # Metadata rows first, mirroring the detail dict layout.
        for meta_key in ("id", "display_label", "kind"):
            writer.writerow([meta_key, str(detail.get(meta_key, ""))])

        # One row per attribute.
        for attr_name in schema.attribute_names:
            entry = detail.get(attr_name, {})
            value = entry.get("value", "") if isinstance(entry, dict) else entry
            writer.writerow([attr_name, str(value)])

        # One row per relationship; many-peers collapse to a joined label list.
        for rel_name in schema.relationship_names:
            entry = detail.get(rel_name, {})
            if not isinstance(entry, dict):
                writer.writerow([rel_name, str(entry)])
            elif entry.get("cardinality") == "one":
                writer.writerow([rel_name, str(entry.get("display_label", ""))])
            else:
                labels = [peer.get("display_label", "") for peer in entry.get("peers", [])]
                writer.writerow([rel_name, ", ".join(labels)])

        return buffer.getvalue()
class JsonFormatter:
    """Formats InfrahubNode data as JSON strings.

    Uses stdlib json module with indentation for readable output.
    """

    def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> str:
        """Format a list of nodes as a JSON array.

        Each node is represented as a dict with attribute and
        relationship field names as keys.

        Args:
            nodes: List of InfrahubNode objects to format.
            schema: Schema definition for the node kind.

        Returns:
            JSON array string.
        """
        rows = [extract_node_data(entry, schema) for entry in nodes]
        # default=str keeps non-JSON-native values (dates, IPs) serialisable.
        return json.dumps(rows, indent=2, default=str)

    def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str:
        """Format a single node as a JSON object.

        Includes metadata (id, display_label, kind) along with
        all attributes and relationships.

        Args:
            node: The InfrahubNode to format.
            schema: Schema definition for the node kind.

        Returns:
            JSON object string.
        """
        return json.dumps(extract_node_detail(node, schema), indent=2, default=str)
+ """ + columns = schema.attribute_names + schema.relationship_names + table = Table(title=schema.kind, show_lines=False) + + for col in columns: + table.add_column(col, overflow="fold") + + for node in nodes: + row_data = extract_node_data(node, schema) + table.add_row(*(str(row_data.get(col, "")) for col in columns)) + + return self._render(table) + + def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: + """Format a single node as a key-value detail view. + + Renders a two-column table (Field / Value) with metadata, + attributes, and relationships sections. + + Args: + node: The InfrahubNode to format. + schema: Schema definition for the node kind. + + Returns: + Rendered detail string. + """ + detail = extract_node_detail(node, schema) + + table = Table( + title=f"{schema.kind} Detail", + show_lines=True, + ) + table.add_column("Field", style="bold") + table.add_column("Value") + + # Metadata section + table.add_row("id", str(detail.get("id", ""))) + table.add_row("display_label", str(detail.get("display_label", ""))) + table.add_row("kind", str(detail.get("kind", ""))) + + # Attributes section + for attr_name in schema.attribute_names: + attr_detail = detail.get(attr_name, {}) + value = attr_detail.get("value", "") if isinstance(attr_detail, dict) else attr_detail + table.add_row(attr_name, str(value)) + + # Relationships section + for rel_name in schema.relationship_names: + rel_detail = detail.get(rel_name, {}) + if not isinstance(rel_detail, dict): + table.add_row(rel_name, str(rel_detail)) + continue + + if rel_detail.get("cardinality") == "one": + label = rel_detail.get("display_label", "") + table.add_row(rel_name, str(label)) + else: + peers = rel_detail.get("peers", []) + labels = [p.get("display_label", "") for p in peers] + table.add_row(rel_name, ", ".join(labels)) + + return self._render(table) + + @staticmethod + def _render(renderable: Table) -> str: + """Capture Rich renderable output to a string. 
class YamlFormatter:
    """Formats InfrahubNode data as YAML in the Infrahub object spec format.

    Output follows the standard Infrahub file structure::

        ---
        apiVersion: infrahub.app/v1
        kind: Object
        spec:
          kind: <SchemaKind>
          data:
            - field1: value1
              field2: value2
    """

    def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> str:
        """Format a list of nodes as an Infrahub YAML object document.

        Each node becomes an entry in the spec.data array with its
        attribute and relationship values.

        Args:
            nodes: List of InfrahubNode objects to format.
            schema: Schema definition for the node kind.

        Returns:
            YAML string in Infrahub object format.
        """
        entries = [self._node_to_data_entry(entry, schema) for entry in nodes]
        return self._build_document(schema.kind, entries)

    def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str:
        """Format a single node as an Infrahub YAML object document.

        The spec.data array contains a single entry for the node.

        Args:
            node: The InfrahubNode to format.
            schema: Schema definition for the node kind.

        Returns:
            YAML string in Infrahub object format.
        """
        return self._build_document(schema.kind, [self._node_to_data_entry(node, schema)])

    def _node_to_data_entry(
        self,
        node: InfrahubNode,
        schema: MainSchemaTypesAPI,
    ) -> dict[str, Any]:
        """Convert a single node into a data entry dict for YAML output.

        Args:
            node: The InfrahubNode to convert.
            schema: Schema definition for the node kind.

        Returns:
            Dict suitable for inclusion in the spec.data array.
        """
        detail = extract_node_detail(node, schema)
        entry: dict[str, Any] = {}

        # Attributes collapse to their plain values.
        for attr_name in schema.attribute_names:
            raw = detail.get(attr_name, {})
            entry[attr_name] = raw.get("value", "") if isinstance(raw, dict) else raw

        # Relationships: one peer becomes a label, many become a data list.
        for rel_name in schema.relationship_names:
            raw = detail.get(rel_name, {})
            if not isinstance(raw, dict):
                entry[rel_name] = raw
            elif raw.get("cardinality") == "one":
                entry[rel_name] = raw.get("display_label", "")
            else:
                entry[rel_name] = {"data": [peer.get("display_label", "") for peer in raw.get("peers", [])]}

        return entry

    @staticmethod
    def _build_document(kind: str, data: list[dict[str, Any]]) -> str:
        """Build the full Infrahub YAML document structure.

        Args:
            kind: The schema kind string (e.g. "InfraDevice").
            data: List of data entry dicts for the spec.data array.

        Returns:
            Complete YAML document string with leading '---' separator.
        """
        document = {
            "apiVersion": _INFRAHUB_API_VERSION,
            "kind": _INFRAHUB_KIND,
            "spec": {
                "kind": kind,
                "data": data,
            },
        }
        rendered = yaml.dump(
            document,
            default_flow_style=False,
            sort_keys=False,
            allow_unicode=True,
        )
        return "---\n" + rendered
+ """ + document = { + "apiVersion": _INFRAHUB_API_VERSION, + "kind": _INFRAHUB_KIND, + "spec": { + "kind": kind, + "data": data, + }, + } + return "---\n" + yaml.dump( + document, + default_flow_style=False, + sort_keys=False, + allow_unicode=True, + ) diff --git a/infrahub_sdk/ctl/parsers.py b/infrahub_sdk/ctl/parsers.py new file mode 100644 index 00000000..e68baa8a --- /dev/null +++ b/infrahub_sdk/ctl/parsers.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +from typing import Any + +import typer + + +def parse_set_args(set_args: list[str]) -> dict[str, str]: + """Parse --set key=value arguments into a dictionary. + + Splits each argument on the first ``=`` sign, allowing values + to contain additional ``=`` characters. + + Args: + set_args: List of "key=value" strings from the CLI. + + Returns: + Dictionary mapping field names to string values. + + Raises: + typer.BadParameter: If any argument is not in key=value format. + """ + result: dict[str, str] = {} + for arg in set_args: + if "=" not in arg: + raise typer.BadParameter(f"Invalid format '{arg}'. Expected key=value.") + key, value = arg.split("=", maxsplit=1) + key = key.strip() + if not key: + raise typer.BadParameter(f"Invalid format '{arg}'. Key must not be empty.") + result[key] = value + return result + + +def parse_filter_args(filter_args: list[str]) -> dict[str, Any]: + """Parse --filter arguments into kwargs for client.filters(). + + Uses the same split-on-first-``=`` logic as :func:`parse_set_args`. + Keys are expected to follow SDK filter conventions + (e.g. ``attribute__value``, ``relationship__id``) but format + validation is left to the SDK. + + Args: + filter_args: List of "attr__value=x" strings from the CLI. + + Returns: + Dictionary of filter kwargs to pass to client.filters(). + + Raises: + typer.BadParameter: If any argument is not in key=value format. 
+ """ + result: dict[str, Any] = {} + for arg in filter_args: + if "=" not in arg: + raise typer.BadParameter(f"Invalid format '{arg}'. Expected key=value.") + key, value = arg.split("=", maxsplit=1) + key = key.strip() + if not key: + raise typer.BadParameter(f"Invalid format '{arg}'. Key must not be empty.") + result[key] = value + return result + + +def validate_set_fields( + data: dict[str, str], + attribute_names: list[str], + relationship_names: list[str], +) -> None: + """Validate that all keys in data are valid attribute or relationship names. + + Args: + data: Parsed set data from parse_set_args. + attribute_names: Valid attribute names from schema. + relationship_names: Valid relationship names from schema. + + Raises: + typer.BadParameter: If any key is not a valid field name, + with a message listing valid fields. + """ + valid_fields = set(attribute_names) | set(relationship_names) + invalid_keys = sorted(set(data.keys()) - valid_fields) + if invalid_keys: + valid_sorted = sorted(valid_fields) + raise typer.BadParameter( + f"Unknown field(s): {', '.join(invalid_keys)}. 
Valid fields: {', '.join(valid_sorted)}" + ) diff --git a/pyproject.toml b/pyproject.toml index 4e0716a6..9b7f0805 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,7 @@ Documentation = "https://docs.infrahub.app/python-sdk/introduction" [project.scripts] infrahubctl = "infrahub_sdk.ctl.cli:app" +infrahub = "infrahub_sdk.ctl.enduser_cli:app" [project.entry-points."pytest11"] "pytest-infrahub" = "infrahub_sdk.pytest_plugin.plugin" diff --git a/specs/001-end-user-cli/checklists/requirements.md b/specs/001-end-user-cli/checklists/requirements.md new file mode 100644 index 00000000..ecd3be28 --- /dev/null +++ b/specs/001-end-user-cli/checklists/requirements.md @@ -0,0 +1,36 @@ +# Specification Quality Checklist: End-User CLI (`infrahub` command) + +**Purpose**: Validate specification completeness and quality before proceeding to planning +**Created**: 2026-03-28 +**Feature**: [spec.md](../spec.md) + +## Content Quality + +- [x] No implementation details (languages, frameworks, APIs) +- [x] Focused on user value and business needs +- [x] Written for non-technical stakeholders +- [x] All mandatory sections completed + +## Requirement Completeness + +- [x] No [NEEDS CLARIFICATION] markers remain +- [x] Requirements are testable and unambiguous +- [x] Success criteria are measurable +- [x] Success criteria are technology-agnostic (no implementation details) +- [x] All acceptance scenarios are defined +- [x] Edge cases are identified +- [x] Scope is clearly bounded +- [x] Dependencies and assumptions identified + +## Feature Readiness + +- [x] All functional requirements have clear acceptance criteria +- [x] User scenarios cover primary flows +- [x] Feature meets measurable outcomes defined in Success Criteria +- [x] No implementation details leak into specification + +## Notes + +- All items pass validation. Spec is ready for `/speckit.clarify` or `/speckit.plan`. +- Assumptions section documents reasonable defaults for unspecified details. 
+- CLI command examples in acceptance scenarios use generic syntax (not framework-specific). diff --git a/specs/001-end-user-cli/contracts/cli-commands.md b/specs/001-end-user-cli/contracts/cli-commands.md new file mode 100644 index 00000000..0abc8aa1 --- /dev/null +++ b/specs/001-end-user-cli/contracts/cli-commands.md @@ -0,0 +1,61 @@ +# CLI Command Contracts + +## Global Options + +All commands accept: +- `--branch TEXT` — Target Infrahub branch (default: from config) +- `--config-file PATH` — Configuration file path (default: infrahubctl.toml) +- `--output [table|json|csv|yaml]` — Output format (default: table if TTY, json if piped) + +## `infrahub get [identifier]` + +**List mode** (no identifier): +- Input: kind (positional), --filter (repeatable), --limit INT, --offset INT +- Output: Table with columns for each attribute + relationship (display names) +- Exit 0: results found | Exit 0: no results (empty table) | Exit 1: invalid kind + +**Detail mode** (with identifier): +- Input: kind (positional), identifier (positional — UUID or display name) +- Output: Key-value display of all attributes, relationships, metadata +- Exit 0: found | Exit 1: not found + +**Filters**: `--filter name__value="spine01"` (repeatable) + +## `infrahub create ` + +- Input: kind (positional), --set key=value (repeatable), --file PATH +- --set and --file are mutually exclusive +- Output: Confirmation with created object ID and display label +- Exit 0: created | Exit 1: validation error | Exit 1: server error + +**File input**: JSON or YAML in Infrahub Object format +(`apiVersion: infrahub.app/v1`) + +## `infrahub update ` + +- Input: kind (positional), identifier (positional), --set key=value + (repeatable), --file PATH +- --set and --file are mutually exclusive +- Output: Confirmation with old → new values for changed fields +- Exit 0: updated | Exit 1: not found | Exit 1: validation error + +## `infrahub delete ` + +- Input: kind (positional), identifier (positional), --yes (skip 
confirmation) +- Output: Confirmation prompt (unless --yes), then success message +- Exit 0: deleted | Exit 1: not found | Exit 1: dependency conflict + +## `infrahub schema list` + +- Input: --filter TEXT (substring match on kind name) +- Output: Table with columns: Namespace, Name, Kind, Description +- Exit 0: always (empty table if no matches) + +## `infrahub schema show ` + +- Input: kind (positional) +- Output: Formatted display of: + - Kind metadata (namespace, label, description, display_labels, HFID) + - Attributes table (name, type, required, default, description) + - Relationships table (name, peer kind, cardinality, optional) +- Exit 0: found | Exit 1: invalid kind diff --git a/specs/001-end-user-cli/data-model.md b/specs/001-end-user-cli/data-model.md new file mode 100644 index 00000000..e83b843f --- /dev/null +++ b/specs/001-end-user-cli/data-model.md @@ -0,0 +1,48 @@ +# Data Model: End-User CLI + +This feature does not introduce new persistent data entities. It operates on +Infrahub's existing data model (Kinds, Nodes, Attributes, Relationships) via +the SDK client. + +The CLI introduces transient structures for formatting and serialization: + +## Output Format Envelope + +Used when serializing query results to YAML output format. + +**Fields**: +- `apiVersion` (str): Always `"infrahub.app/v1"` +- `kind` (str): Always `"Object"` +- `spec.kind` (str): The Infrahub Kind being exported (e.g., `"InfraDevice"`) +- `spec.data` (list[dict]): Array of serialized node objects + +Each node in `spec.data` contains: +- Attribute fields as `key: value` pairs +- Relationship fields as `key: display_name` (single) or + `key: {data: [list]}` (many) + +This structure matches the existing `InfrahubObjectFileData` model in +`infrahub_sdk/spec/object.py` and is round-trippable with `ObjectFile`. + +## Set Flag Parser + +Parses `--set key=value` arguments into a dict suitable for SDK calls. 
+
+**Input**: List of `"key=value"` strings from CLI
+**Output**: `dict[str, str]` (values are strings; the SDK handles type coercion)
+
+Validation rules:
+- Key MUST exist as an attribute or relationship name in the target Kind's schema
+- Value is a string; the SDK handles type coercion
+- For relationships, value is the display name or UUID of the target node
+
+## Filter Parser
+
+Parses `--filter key=value` arguments into kwargs for `client.filters()`.
+
+**Input**: List of `"attribute__value=x"` strings from CLI
+**Output**: `dict[str, Any]` passed as `**kwargs`
+
+Validation rules:
+- Key MUST follow the `attribute__value` or `relationship__id` pattern
+- Key format validation is left to the SDK; invalid filters surface as a server-side error
diff --git a/specs/001-end-user-cli/plan.md b/specs/001-end-user-cli/plan.md
new file mode 100644
index 00000000..c49ea5f9
--- /dev/null
+++ b/specs/001-end-user-cli/plan.md
@@ -0,0 +1,113 @@
+# Implementation Plan: End-User CLI (`infrahub` command)
+
+**Branch**: `001-end-user-cli` | **Date**: 2026-03-28 | **Spec**: [spec.md](spec.md)
+**Input**: Feature specification from `specs/001-end-user-cli/spec.md`
+
+## Summary
+
+Create a new `infrahub` CLI entry point for end users to perform CRUD operations
+on Infrahub data and discover schema. The CLI reuses the existing SDK client,
+configuration, and AsyncTyper framework from `infrahubctl`, adding commands for
+`get`, `create`, `update`, `delete`, and `schema` operations with multiple output
+formats including round-trippable Infrahub Object YAML.
+ +## Technical Context + +**Language/Version**: Python 3.10-3.13 +**Primary Dependencies**: typer (via AsyncTyper), rich, pyyaml, httpx (via SDK client) +**Storage**: N/A (all data in Infrahub server via SDK) +**Testing**: pytest (unit + integration) +**Target Platform**: Linux, macOS, Windows (CLI) +**Project Type**: Single project (extension of existing SDK package) +**Performance Goals**: Query results < 5s for < 1000 objects +**Constraints**: Must coexist with `infrahubctl`; shared config +**Scale/Scope**: ~10 new modules, ~1500-2000 lines of production code + +## Constitution Check + +*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.* + +| Principle | Status | Notes | +|-----------|--------|-------| +| I. Async/Sync Dual Pattern | PASS | CLI commands are async (via AsyncTyper). No new public SDK API surface requiring dual pattern — CLI is async-only consumer. | +| II. Type Safety | PASS | All new functions will have type hints. mypy/ty must pass. | +| III. Test Discipline | PASS | FR-015 requires unit + integration tests. 70% coverage target. | +| IV. API Stability | PASS | New entry point, no changes to existing public API. No new dependencies needed. | +| V. Documentation Completeness | PASS | Google-style docstrings required. `docs-generate` after CLI changes. | + +No violations. No complexity tracking needed. 
+ +## Project Structure + +### Documentation (this feature) + +```text +specs/001-end-user-cli/ +├── plan.md # This file +├── spec.md # Feature specification +├── research.md # Phase 0 research findings +├── data-model.md # Data model (transient structures) +├── quickstart.md # Usage quickstart guide +├── contracts/ +│ └── cli-commands.md # CLI command contracts +└── tasks.md # Phase 2 output (/speckit.tasks) +``` + +### Source Code (repository root) + +```text +infrahub_sdk/ctl/ +├── enduser_cli.py # New: main app + entry point for `infrahub` +├── enduser_commands.py # New: top-level command registration +├── commands/ +│ ├── __init__.py # New: commands package +│ ├── get.py # New: `infrahub get` command +│ ├── create.py # New: `infrahub create` command +│ ├── update.py # New: `infrahub update` command +│ ├── delete.py # New: `infrahub delete` command +│ └── schema.py # New: `infrahub schema` command group +├── formatters/ +│ ├── __init__.py # New: formatters package +│ ├── base.py # New: base formatter protocol/ABC +│ ├── table.py # New: Rich table formatter +│ ├── json.py # New: JSON formatter +│ ├── csv.py # New: CSV formatter +│ └── yaml.py # New: Infrahub Object YAML formatter +└── parsers.py # New: --set and --filter argument parsers + +tests/unit/ctl/ +├── commands/ +│ ├── __init__.py +│ ├── test_get.py # New: unit tests for get command +│ ├── test_create.py # New: unit tests for create command +│ ├── test_update.py # New: unit tests for update command +│ ├── test_delete.py # New: unit tests for delete command +│ └── test_schema.py # New: unit tests for schema commands +├── formatters/ +│ ├── __init__.py +│ ├── test_table.py # New: table formatter tests +│ ├── test_json.py # New: JSON formatter tests +│ ├── test_csv.py # New: CSV formatter tests +│ └── test_yaml.py # New: YAML formatter tests +└── test_parsers.py # New: parser tests + +tests/integration/ +└── test_enduser_cli.py # New: integration tests against Infrahub +``` + +**Structure Decision**: 
Extend `infrahub_sdk/ctl/` with a parallel entry point. +New commands go in a `commands/` subdirectory to separate end-user commands from +existing `infrahubctl` modules. Formatters are isolated in `formatters/` for +testability and reuse across commands. + +## Post-Design Constitution Re-Check + +| Principle | Status | Notes | +|-----------|--------|-------| +| I. Async/Sync Dual Pattern | PASS | No new public SDK API. CLI is async consumer only. | +| II. Type Safety | PASS | All modules typed. No generated code modified. | +| III. Test Discipline | PASS | Test structure mirrors source structure. Unit tests mock SDK client. Integration tests hit Infrahub. | +| IV. API Stability | PASS | New `infrahub` entry point in pyproject.toml. No existing API changes. pyyaml, rich, typer already in `[ctl]` deps. | +| V. Documentation Completeness | PASS | Each new module gets docstrings. `docs-generate` run after completion. | + +All gates pass. Ready for `/speckit.tasks`. diff --git a/specs/001-end-user-cli/quickstart.md b/specs/001-end-user-cli/quickstart.md new file mode 100644 index 00000000..04b8edef --- /dev/null +++ b/specs/001-end-user-cli/quickstart.md @@ -0,0 +1,116 @@ +# Quickstart: `infrahub` CLI + +## Prerequisites + +- Python 3.10+ +- Infrahub SDK installed with CLI extras: `pip install infrahub-sdk[ctl]` +- A running Infrahub instance + +## Configuration + +The `infrahub` command uses the same configuration as `infrahubctl`. 
+ +Set via environment variables: + +```bash +export INFRAHUB_ADDRESS="http://localhost:8000" +export INFRAHUB_API_TOKEN="your-api-token" +``` + +Or via `infrahubctl.toml`: + +```toml +[infrahub] +server_address = "http://localhost:8000" +api_token = "your-api-token" +``` + +## Discover Your Schema + +```bash +# List all available kinds +infrahub schema list + +# Filter by name +infrahub schema list --filter "Device" + +# Show details for a specific kind +infrahub schema show InfraDevice +``` + +## Query Data + +```bash +# List all devices +infrahub get InfraDevice + +# Filter by attribute +infrahub get InfraDevice --filter name__value="spine01" + +# Get a single device's full details +infrahub get InfraDevice spine01 + +# Output as JSON (for scripting) +infrahub get InfraDevice --output json + +# Export as Infrahub Object YAML (round-trippable) +infrahub get InfraDevice --output yaml > devices.yaml + +# Query a specific branch +infrahub get InfraDevice --branch develop + +# Paginate results +infrahub get InfraDevice --limit 10 --offset 20 +``` + +## Create Objects + +```bash +# Create with inline flags +infrahub create InfraDevice \ + --set name="spine03" \ + --set description="New spine switch" \ + --set site="dc1" + +# Create from a YAML file +infrahub create InfraDevice --file new-devices.yaml +``` + +## Update Objects + +```bash +# Update an attribute +infrahub update InfraDevice spine03 \ + --set description="Updated spine switch" + +# Update from file +infrahub update InfraDevice spine03 --file updates.yaml +``` + +## Delete Objects + +```bash +# Delete with confirmation prompt +infrahub delete InfraDevice spine03 + +# Skip confirmation +infrahub delete InfraDevice spine03 --yes +``` + +## Output Formats + +| Format | Flag | Use Case | +|--------|------|----------| +| Table | `--output table` | Interactive terminal (default) | +| JSON | `--output json` | Scripting, piping (default when piped) | +| CSV | `--output csv` | Spreadsheet import, data analysis | +| 
YAML | `--output yaml` | Backup, round-trip with `--file` | + +## Validation + +To verify the CLI is working: + +1. `infrahub schema list` — confirms connection and authentication +2. `infrahub get ` — confirms data access +3. `infrahub get --output yaml > test.yaml` then + `infrahub create --file test.yaml` — confirms round-trip diff --git a/specs/001-end-user-cli/research.md b/specs/001-end-user-cli/research.md new file mode 100644 index 00000000..ea56ebcb --- /dev/null +++ b/specs/001-end-user-cli/research.md @@ -0,0 +1,104 @@ +# Research: End-User CLI (`infrahub` command) + +## R1: Entry Point & Packaging Strategy + +**Decision**: Add `infrahub` as a second entry point in `[project.scripts]` within the +same package, pointing to a new app in `infrahub_sdk/ctl/enduser_cli.py`. + +**Rationale**: The existing `infrahubctl` entry point lives in `infrahub_sdk/ctl/cli.py` +and uses the same `[ctl]` optional dependency group (typer, rich, click, pyyaml). The +end-user CLI needs identical dependencies. A separate package would duplicate +configuration, authentication, and client initialization code. A second entry point in +the same package reuses all existing infrastructure. + +**Alternatives considered**: +- Separate Python package: rejected — duplicates config/client code, complicates releases +- Subcommand of `infrahubctl`: rejected — user explicitly wants separate `infrahub` + command with end-user focus distinct from developer tooling + +## R2: CLI Framework & Async Pattern + +**Decision**: Use `AsyncTyper` (existing wrapper at `infrahub_sdk/async_typer.py`) with +Rich console output, matching the `infrahubctl` patterns exactly. + +**Rationale**: The project already has a proven async CLI pattern. AsyncTyper wraps +`asyncio.run()` around async command functions. All existing utilities (`catch_exception`, +`initialize_client`, `CONFIG_PARAM`) work with this pattern. 
+ +**Alternatives considered**: +- Click directly: rejected — less ergonomic, would diverge from existing patterns +- Sync-only CLI: rejected — SDK client methods are async-first + +## R3: Query Implementation + +**Decision**: Use `client.all()` for list queries and `client.get()` for single-object +detail view. Filters pass through as `**kwargs` to `client.filters()`. + +**Rationale**: `client.all()` wraps `client.filters()` internally and supports +`offset`, `limit`, `prefetch_relationships`, `include`, `exclude`, and `order` +parameters. Filter syntax is `attribute__value="x"` or `relationship__id="uuid"`. +Pagination is handled automatically with `client.pagination_size`. + +**Key findings**: +- `node.display_label` provides the human-readable name for table display +- `node..value` accesses attribute values +- `schema.attribute_names` and `schema.relationship_names` enumerate fields +- `schema.display_labels` identifies which attributes form the display label + +## R4: Object YAML Round-Trip Format + +**Decision**: Reuse the existing `InfrahubObjectFileData` model from +`infrahub_sdk/spec/object.py` for file input. For YAML output, build the reverse: +serialize query results into the same `apiVersion: infrahub.app/v1` / `kind: Object` +structure. + +**Rationale**: The spec object format is already defined with Pydantic models. Input +parsing uses `ObjectFile.load_from_disk()` → `InfrahubObjectFileData.process()`. The +reverse direction needs a serializer that walks node attributes and relationships to +produce the same dict structure. + +**Key classes**: +- `InfrahubObjectFileData` — spec model with `kind`, `parameters`, `data` fields +- `ObjectFile` — file wrapper with `validate_content()` and `process()` methods +- Relationship formats: `ONE_REF`, `MANY_REF`, `ONE_OBJ`, `MANY_OBJ_DICT_LIST` + +## R5: Schema Discovery + +**Decision**: Use `client.schema.all(branch=branch)` for listing kinds and +`client.schema.get(kind=kind, branch=branch)` for kind details. 
+ +**Rationale**: Schema API returns `NodeSchemaAPI` / `GenericSchemaAPI` objects with +`attribute_names`, `relationship_names`, `mandatory_input_names`, `display_labels`, +`human_friendly_id`, `namespace`, `label`, and `description` properties. + +## R6: Create/Update/Delete Implementation + +**Decision**: Use existing SDK CRUD methods: +- Create: `client.create(kind=kind, data=data)` → `node.save(allow_upsert=True)` +- Update: `client.get(kind=kind, id=identifier)` → modify attrs → `node.save()` +- Delete: `client.get(kind=kind, id=identifier)` → `node.delete()` + +**Rationale**: These are the standard SDK patterns used by `infrahubctl` commands. +The `--set` flag maps directly to the `data` dict passed to `client.create()` or +applied to node attributes before `node.save()`. + +**Key detail**: Identifier resolution accepts both UUID and HFID (human-friendly ID) +via the `id` parameter of `client.get()`. + +## R7: Configuration Reuse + +**Decision**: Reuse `infrahub_sdk/ctl/config.py` and `CONFIG_PARAM` from +`infrahub_sdk/ctl/parameters.py` directly. + +**Rationale**: The `Settings` class reads from `infrahubctl.toml` (or +`INFRAHUBCTL_CONFIG` env var) with `server_address`, `api_token`, and +`default_branch`. No new configuration mechanism needed. + +## R8: Output Formatting + +**Decision**: Implement four output formatters: table (Rich), JSON, CSV, YAML. +Auto-detect: table when stdout is a TTY, JSON when piped. + +**Rationale**: Rich is already a dependency. JSON uses stdlib `json`. CSV uses stdlib +`csv`. YAML uses `pyyaml` (already in `[ctl]` deps). The auto-detect pattern +(`sys.stdout.isatty()`) is standard CLI practice. 
diff --git a/specs/001-end-user-cli/spec.md b/specs/001-end-user-cli/spec.md new file mode 100644 index 00000000..3ffd252c --- /dev/null +++ b/specs/001-end-user-cli/spec.md @@ -0,0 +1,152 @@ +# Feature Specification: End-User CLI (`infrahub` command) + +**Feature Branch**: `001-end-user-cli` +**Created**: 2026-03-28 +**Status**: Draft +**Input**: User description: "Create an `infrahub` CLI command for end users to query, create, and modify data in the Infrahub database, distinct from the developer-oriented `infrahubctl`." + +## User Scenarios & Testing *(mandatory)* + +### User Story 1 - Query Data from Infrahub (Priority: P1) + +An end user wants to retrieve data from Infrahub to answer operational questions. They open a terminal, run a command specifying the type of object they want (e.g., devices, interfaces, IP addresses), and receive a formatted table of results. They can filter results by attribute values and choose output formats (table, JSON, CSV) depending on whether they are reading interactively or piping to another tool. + +**Why this priority**: Reading data is the most fundamental operation. Without query capability, no other CRUD operations provide value. This is also the lowest-risk operation (read-only) and serves the widest audience. + +**Independent Test**: Can be fully tested by querying any existing node type in an Infrahub instance and verifying correct output. Delivers immediate value for operational visibility. + +**Acceptance Scenarios**: + +1. **Given** a running Infrahub instance with data, **When** the user runs `infrahub get `, **Then** a formatted table of all objects of that kind is displayed with attribute columns and relationship columns (showing display names). +2. **Given** a running Infrahub instance, **When** the user runs `infrahub get --filter name__value="spine01"`, **Then** only objects matching the filter are returned. +3. 
**Given** a running Infrahub instance, **When** the user runs `infrahub get --output json`, **Then** the results are printed as valid JSON to stdout. +4. **Given** a running Infrahub instance, **When** the user runs `infrahub get --output yaml`, **Then** the results are printed in Infrahub Object YAML format (with `apiVersion: infrahub.app/v1`, `kind: Object`, `spec.kind`, and `spec.data` array), suitable for round-tripping back into `infrahub create --file`. +5. **Given** an Infrahub instance, **When** the user runs `infrahub get --branch develop`, **Then** data from the specified branch is returned. +6. **Given** an invalid kind name, **When** the user runs `infrahub get UnknownKind`, **Then** a clear error message is displayed listing available kinds or suggesting corrections. +7. **Given** an existing object, **When** the user runs `infrahub get `, **Then** a detailed view is displayed showing all attributes, relationships, and metadata for that single object. + +--- + +### User Story 2 - Create New Objects (Priority: P2) + +An end user needs to add new infrastructure data to Infrahub. They run a command specifying the object kind and its attribute values, and the system creates the object and confirms success. They can also create objects from a file (JSON or YAML) for batch operations. + +**Why this priority**: After querying, creation is the next most common operation. Users need to populate Infrahub with data. This is a natural progression from read to write. + +**Independent Test**: Can be tested by creating an object of any kind and then querying it back to verify it exists with correct attributes. + +**Acceptance Scenarios**: + +1. **Given** a running Infrahub instance, **When** the user runs `infrahub create --set name="spine03" --set description="New spine switch"`, **Then** the object is created and a confirmation with the object ID is displayed. +2. 
**Given** a YAML file with object definitions, **When** the user runs `infrahub create --file objects.yaml`, **Then** all objects in the file are created and a summary of results (created count, errors) is displayed. +3. **Given** invalid attribute or relationship names, **When** the user runs `infrahub create --set invalid_field="value"`, **Then** a clear validation error is displayed indicating which fields are invalid and what the valid attributes and relationships are. + +--- + +### User Story 3 - Modify Existing Objects (Priority: P3) + +An end user needs to update attributes on existing infrastructure objects. They identify the object by kind and name (or ID), specify the attributes to change, and the system applies the update and confirms. + +**Why this priority**: Modification completes the core CRUD workflow. Users who can query and create also need to update existing records as infrastructure changes. + +**Independent Test**: Can be tested by modifying an attribute on an existing object and querying it back to verify the change persists. + +**Acceptance Scenarios**: + +1. **Given** an existing object, **When** the user runs `infrahub update --set description="Updated description"`, **Then** the object is updated and a confirmation is displayed showing old and new values. +2. **Given** an existing object, **When** the user runs `infrahub update --file updates.yaml`, **Then** the object is updated from the file contents. +3. **Given** a non-existent object identifier, **When** the user runs `infrahub update nonexistent`, **Then** a clear error message indicates the object was not found. + +--- + +### User Story 4 - Delete Objects (Priority: P4) + +An end user needs to remove obsolete infrastructure data from Infrahub. They specify the object to delete by kind and identifier, confirm the deletion, and the system removes it. + +**Why this priority**: Deletion is the least frequent CRUD operation and the most dangerous. 
It completes the full lifecycle but is lower priority than the core read/create/update loop. + +**Independent Test**: Can be tested by creating an object, deleting it, and confirming it no longer appears in query results. + +**Acceptance Scenarios**: + +1. **Given** an existing object, **When** the user runs `infrahub delete `, **Then** a confirmation prompt is shown, and upon confirmation the object is deleted with a success message. +2. **Given** an existing object, **When** the user runs `infrahub delete --yes`, **Then** the object is deleted without a confirmation prompt. +3. **Given** an object with dependencies, **When** the user attempts to delete it, **Then** a clear error message explains what depends on it and how to resolve the conflict. + +--- + +### User Story 5 - Discover Available Schema (Priority: P5) + +An end user unfamiliar with the data model wants to explore what kinds of objects exist in Infrahub and what attributes each kind has. They run a command to list available kinds and inspect their schema. + +**Why this priority**: Schema discovery supports all other operations. Without knowing what kinds and attributes exist, users cannot effectively query, create, or update. However, this is a supporting operation, not a core data operation. + +**Independent Test**: Can be tested by listing schema kinds and inspecting a known kind's attributes against the actual schema definition. + +**Acceptance Scenarios**: + +1. **Given** a running Infrahub instance, **When** the user runs `infrahub schema list`, **Then** a table of all available kinds is displayed with their namespace, name, and description. +2. **Given** a valid kind name, **When** the user runs `infrahub schema show `, **Then** the kind's attributes, relationships, and constraints are displayed in a readable format. +3. **Given** a partial kind name, **When** the user runs `infrahub schema list --filter "Network"`, **Then** only kinds matching the filter are shown. 
+ +--- + +### Edge Cases + +- What happens when the Infrahub server is unreachable? Clear connection error with the configured server address shown. +- What happens when the API token is missing or expired? Authentication error with instructions on how to configure credentials. +- What happens when the user queries a kind with thousands of objects? Results are paginated with a default limit and the user is informed of total count. +- What happens when a create/update operation partially fails in batch mode? A detailed report shows which objects succeeded and which failed, with per-object error messages. +- What happens when the user provides attributes in the wrong format? Validation error specifying expected format for each attribute. + +## Clarifications + +### Session 2026-03-28 + +- Q: How should users specify relationships in create/update commands? → A: Unified `--set` flag for both attributes and relationships (e.g., `--set name="x" --set site="my-site"`). +- Q: Should there be a single-object detail view? → A: `infrahub get <kind> <identifier>` shows a detail view with all attributes, relationships, and metadata. +- Q: How should relationships appear in list/table output? → A: Show as columns with their display name (e.g., site column shows "my-site"). Full relationship detail in detail view only. + +## Requirements *(mandatory)* + +### Functional Requirements + +- **FR-001**: The system MUST provide an `infrahub` CLI entry point, separate from `infrahubctl`. +- **FR-002**: The system MUST support querying objects by kind with `infrahub get <kind>` (list view) and `infrahub get <kind> <identifier>` (detail view showing all attributes, relationships, and metadata). +- **FR-003**: The system MUST support filtering query results by attribute values. +- **FR-004**: The system MUST support multiple output formats: human-readable table (default), JSON, CSV, and Infrahub Object YAML (`--output yaml`). 
The YAML format MUST use the Infrahub spec object structure (`apiVersion: infrahub.app/v1`, `kind: Object`, with `spec.kind` and `spec.data` fields), matching the format used by `infrahub create --file`. +- **FR-005**: The system MUST support creating objects with `infrahub create <kind>` using inline `--set` flags (for both attributes and relationships) or file input. +- **FR-006**: The system MUST support updating objects with `infrahub update <kind> <identifier>` using inline `--set` flags (for both attributes and relationships) or file input. +- **FR-007**: The system MUST support deleting objects with `infrahub delete <kind> <identifier>` with confirmation. +- **FR-008**: The system MUST support schema discovery with `infrahub schema list` and `infrahub schema show <kind>`. +- **FR-009**: The system MUST support specifying a target branch for all operations via `--branch`. +- **FR-010**: The system MUST reuse the existing SDK configuration mechanism (server address, API token) from `infrahubctl.toml` or environment variables. +- **FR-011**: The system MUST display clear, actionable error messages for all failure modes (connection, authentication, validation, not found). +- **FR-012**: The system MUST paginate large result sets with configurable page size via `--limit` and `--offset`. +- **FR-013**: The system MUST support batch operations from file input (JSON or YAML) for create and update commands. +- **FR-014**: The system MUST provide a `--yes` flag to skip confirmation prompts for destructive operations. +- **FR-015**: All new code MUST have unit tests covering public functions and integration tests covering Infrahub server interactions, consistent with the project's test discipline standards. + +### Key Entities + +- **Kind**: A type definition in the Infrahub schema (e.g., InfraDevice, IpamIPAddress). Has a namespace, name, attributes, and relationships. +- **Node**: An instance of a Kind stored in Infrahub. Has an ID, attribute values, and relationship connections. 
+- **Attribute**: A named property on a Kind with a type, optional constraints, and a value on each Node. +- **Relationship**: A typed connection between two Nodes, defined in the schema with cardinality and direction. + +## Assumptions + +- The `infrahub` command shares the same configuration file and environment variables as `infrahubctl` (no separate config needed). +- Object identifiers in update/delete commands accept either the object's display name or its UUID. +- The default output format for interactive terminals is a human-readable table; when stdout is piped, JSON is used automatically. +- Batch file input supports both JSON and YAML formats with the same schema. + +## Success Criteria *(mandatory)* + +### Measurable Outcomes + +- **SC-001**: Users can query any object kind and receive formatted results within 5 seconds for datasets under 1000 objects. +- **SC-002**: Users can create a single object in under 3 commands (configure once, then one create command). +- **SC-003**: 90% of first-time users can successfully query data without consulting documentation beyond `--help`. +- **SC-004**: All error messages include a suggested corrective action (not just a failure description). +- **SC-005**: The CLI supports all CRUD operations and schema discovery as a single installable command alongside `infrahubctl`. diff --git a/specs/001-end-user-cli/tasks.md b/specs/001-end-user-cli/tasks.md new file mode 100644 index 00000000..031c08d6 --- /dev/null +++ b/specs/001-end-user-cli/tasks.md @@ -0,0 +1,277 @@ +# Tasks: End-User CLI (`infrahub` command) + +**Input**: Design documents from `/specs/001-end-user-cli/` +**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, contracts/ + +**Tests**: Tests are REQUIRED per FR-015. Unit tests for all public functions, integration tests for Infrahub server interactions. 
+ +**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story. + +## Format: `[ID] [P?] [Story] Description` + +- **[P]**: Can run in parallel (different files, no dependencies) +- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3) +- Include exact file paths in descriptions + +## Path Conventions + +- **Source**: `infrahub_sdk/ctl/` (extends existing CLI package) +- **Unit tests**: `tests/unit/ctl/` +- **Integration tests**: `tests/integration/` + +--- + +## Phase 1: Setup (Shared Infrastructure) + +**Purpose**: Create the `infrahub` entry point and package structure + +- [x] T001 Add `infrahub` entry point to `[project.scripts]` in pyproject.toml pointing to `infrahub_sdk.ctl.enduser_cli:app` +- [x] T002 Create CLI entry point module in infrahub_sdk/ctl/enduser_cli.py with AsyncTyper app and error-handling wrapper (matching infrahub_sdk/ctl/cli.py pattern) +- [x] T003 [P] Create commands package with infrahub_sdk/ctl/commands/__init__.py +- [x] T004 [P] Create formatters package with infrahub_sdk/ctl/formatters/__init__.py + +--- + +## Phase 2: Foundational (Blocking Prerequisites) + +**Purpose**: Core infrastructure reused by ALL user story commands + +**CRITICAL**: No user story work can begin until this phase is complete + +- [x] T005 Implement `--set` flag parser (parse `key=value` strings into dict) in infrahub_sdk/ctl/parsers.py +- [x] T006 Implement `--filter` flag parser (parse `attr__value=x` strings into kwargs dict) in infrahub_sdk/ctl/parsers.py +- [x] T007 Implement output format auto-detection (TTY → table, piped → json) and `OutputFormat` enum in infrahub_sdk/ctl/formatters/__init__.py +- [x] T008 [P] Implement base formatter protocol with `format_list()` and `format_detail()` methods in infrahub_sdk/ctl/formatters/base.py +- [x] T009 [P] Implement Rich table formatter (list view: attribute + relationship columns with display names; detail view: key-value pairs) in 
infrahub_sdk/ctl/formatters/table.py +- [x] T010 [P] Implement JSON formatter (list and detail mode) in infrahub_sdk/ctl/formatters/json.py +- [x] T011 [P] Implement CSV formatter (list mode; detail mode falls back to key-value) in infrahub_sdk/ctl/formatters/csv.py +- [x] T012 [P] Implement Infrahub Object YAML formatter (serialize nodes to apiVersion/kind/spec.kind/spec.data structure, round-trippable with ObjectFile) in infrahub_sdk/ctl/formatters/yaml.py +- [x] T013 Create command registration module in infrahub_sdk/ctl/enduser_commands.py (register all command groups on the app) +- [x] T014 [P] Write unit tests for set/filter parsers in tests/unit/ctl/test_parsers.py +- [x] T015 [P] Write unit tests for table formatter in tests/unit/ctl/formatters/test_table.py +- [x] T016 [P] Write unit tests for JSON formatter in tests/unit/ctl/formatters/test_json.py +- [x] T017 [P] Write unit tests for CSV formatter in tests/unit/ctl/formatters/test_csv.py +- [x] T018 [P] Write unit tests for YAML formatter (verify round-trip structure matches InfrahubObjectFileData) in tests/unit/ctl/formatters/test_yaml.py + +**Checkpoint**: Foundation ready - all formatters, parsers, and app skeleton in place. User story commands can now be implemented. + +--- + +## Phase 3: User Story 1 - Query Data (Priority: P1) MVP + +**Goal**: Users can retrieve data from Infrahub with `infrahub get <kind>` (list) and `infrahub get <kind> <identifier>` (detail), with filtering, pagination, and all output formats. + +**Independent Test**: Run `infrahub get <kind>` against an Infrahub instance and verify formatted output. Test all four output formats. Test `--filter`, `--limit`, `--offset`, `--branch`. 
+ +### Tests for User Story 1 + +- [x] T019 [P] [US1] Write unit tests for get command (list mode, detail mode, invalid kind error, filter passthrough, pagination args, output format selection) in tests/unit/ctl/commands/test_get.py +- [x] T020 [P] [US1] Write integration test for get command (query real data, verify table/json/yaml/csv output) in tests/integration/test_enduser_cli.py + +### Implementation for User Story 1 + +- [x] T021 [US1] Implement `infrahub get` command with list mode (`client.filters()` with kwargs from --filter, --limit, --offset, --branch) and detail mode (`client.get()` with identifier) in infrahub_sdk/ctl/commands/get.py +- [x] T022 [US1] Wire get command into enduser_commands.py and verify `infrahub get` works end-to-end +- [x] T023 [US1] Add error handling for invalid kind (suggest similar kinds from schema), not-found identifier, and connection failures in infrahub_sdk/ctl/commands/get.py + +**Checkpoint**: `infrahub get` fully functional with all output formats, filtering, pagination, detail view. MVP complete. + +--- + +## Phase 4: User Story 2 - Create Objects (Priority: P2) + +**Goal**: Users can create new objects with `infrahub create <kind> --set key=value` or `infrahub create <kind> --file objects.yaml`. + +**Independent Test**: Create an object via `--set` flags, then verify it exists with `infrahub get`. Create objects from a YAML file and verify batch results. 
+ +### Tests for User Story 2 + +- [x] T024 [P] [US2] Write unit tests for create command (inline --set, file input, mutual exclusivity of --set/--file, validation errors, batch summary) in tests/unit/ctl/commands/test_create.py +- [x] T025 [P] [US2] Write integration test for create command (create via --set, create via --file, verify with get) in tests/integration/test_enduser_cli.py + +### Implementation for User Story 2 + +- [x] T026 [US2] Implement `infrahub create` command with inline mode (`client.create()` + `node.save()` using parsed --set data) and file mode (load via ObjectFile, validate, process) in infrahub_sdk/ctl/commands/create.py +- [x] T027 [US2] Wire create command into enduser_commands.py +- [x] T028 [US2] Add validation error handling (invalid fields → show valid attribute/relationship names from schema) and batch result summary in infrahub_sdk/ctl/commands/create.py + +**Checkpoint**: `infrahub create` works with both inline and file input. Users can create and then query back objects. + +--- + +## Phase 5: User Story 3 - Update Objects (Priority: P3) + +**Goal**: Users can update existing objects with `infrahub update <kind> <identifier> --set key=value` or `--file`. + +**Independent Test**: Update an attribute on an existing object, then query it to verify the change. Show old vs new values in confirmation. 
+ +### Tests for User Story 3 + +- [x] T029 [P] [US3] Write unit tests for update command (inline --set, file input, not-found error, old/new value display) in tests/unit/ctl/commands/test_update.py +- [x] T030 [P] [US3] Write integration test for update command (update attribute, verify change persists) in tests/integration/test_enduser_cli.py + +### Implementation for User Story 3 + +- [x] T031 [US3] Implement `infrahub update` command (`client.get()` to fetch node, apply --set changes to attributes/relationships, `node.save()`, display old → new values) in infrahub_sdk/ctl/commands/update.py +- [x] T032 [US3] Wire update command into enduser_commands.py +- [x] T033 [US3] Add file-based update mode and not-found error handling in infrahub_sdk/ctl/commands/update.py + +**Checkpoint**: `infrahub update` works. Full create → query → update → query cycle verified. + +--- + +## Phase 6: User Story 4 - Delete Objects (Priority: P4) + +**Goal**: Users can delete objects with `infrahub delete <kind> <identifier>` with confirmation prompt and `--yes` bypass. + +**Independent Test**: Create an object, delete it (with and without --yes), verify it no longer appears in query results. 
+ +### Tests for User Story 4 + +- [x] T034 [P] [US4] Write unit tests for delete command (confirmation prompt, --yes bypass, not-found, dependency conflict error) in tests/unit/ctl/commands/test_delete.py +- [x] T035 [P] [US4] Write integration test for delete command (create, delete, verify gone) in tests/integration/test_enduser_cli.py + +### Implementation for User Story 4 + +- [x] T036 [US4] Implement `infrahub delete` command (`client.get()` to fetch, confirmation prompt via typer.confirm(), `node.delete()`, --yes flag to skip) in infrahub_sdk/ctl/commands/delete.py +- [x] T037 [US4] Wire delete command into enduser_commands.py +- [x] T038 [US4] Add dependency conflict error handling (catch server error, display dependent objects) in infrahub_sdk/ctl/commands/delete.py + +**Checkpoint**: Full CRUD cycle complete. All data operations functional. + +--- + +## Phase 7: User Story 5 - Schema Discovery (Priority: P5) + +**Goal**: Users can explore the data model with `infrahub schema list` and `infrahub schema show <kind>`. + +**Independent Test**: List all schema kinds, verify output matches actual schema. Show a specific kind's attributes and relationships. 
+ +### Tests for User Story 5 + +- [x] T039 [P] [US5] Write unit tests for schema list and schema show commands (list with filter, show with valid/invalid kind, attribute/relationship table output) in tests/unit/ctl/commands/test_schema.py +- [x] T040 [P] [US5] Write integration test for schema commands (list against real instance, show known kind) in tests/integration/test_enduser_cli.py + +### Implementation for User Story 5 + +- [x] T041 [US5] Implement `infrahub schema list` command (`client.schema.all()`, filter by substring, display table with Namespace/Name/Kind/Description columns) in infrahub_sdk/ctl/commands/schema.py +- [x] T042 [US5] Implement `infrahub schema show ` command (`client.schema.get()`, display metadata + attributes table + relationships table) in infrahub_sdk/ctl/commands/schema.py +- [x] T043 [US5] Wire schema command group into enduser_commands.py + +**Checkpoint**: All 5 user stories complete. Full CLI feature set available. + +--- + +## Phase 8: Polish & Cross-Cutting Concerns + +**Purpose**: Quality gates, documentation, and validation + +- [x] T044 Run `uv run invoke format` and `uv run invoke lint-code` across all new files +- [x] T045 Run `uv run pytest tests/unit/ctl/` to verify all unit tests pass +- [x] T046 Run `uv run invoke docs-generate` and `uv run invoke docs-validate` to update CLI documentation +- [x] T047 Verify type checking passes: `uv run invoke lint` (includes mypy and ty) +- [x] T048 Run quickstart.md validation: manually execute the quickstart steps against a test instance +- [x] T049 [P] Add Google-style docstrings to all new modules, classes, and public functions if not already present + +--- + +## Dependencies & Execution Order + +### Phase Dependencies + +- **Setup (Phase 1)**: No dependencies - can start immediately +- **Foundational (Phase 2)**: Depends on Setup (T001-T004) - BLOCKS all user stories +- **User Stories (Phase 3-7)**: All depend on Foundational phase completion + - US1 (get) has no dependencies 
on other stories + - US2 (create) has no dependencies on other stories (reuses parsers from Phase 2) + - US3 (update) has no dependencies on other stories + - US4 (delete) has no dependencies on other stories + - US5 (schema) has no dependencies on other stories +- **Polish (Phase 8)**: Depends on all user stories being complete + +### User Story Dependencies + +- **US1 (P1)**: Can start after Phase 2. No cross-story dependencies. +- **US2 (P2)**: Can start after Phase 2. Independent of US1 (uses same parsers/formatters). +- **US3 (P3)**: Can start after Phase 2. Independent of US1/US2. +- **US4 (P4)**: Can start after Phase 2. Independent of US1/US2/US3. +- **US5 (P5)**: Can start after Phase 2. Independent of all other stories. + +### Within Each User Story + +- Unit tests written first (marked [P] where independent) +- Command implementation after tests exist +- Integration into enduser_commands.py after command works +- Error handling as final step in each story + +### Parallel Opportunities + +- T003, T004: Package init files can be created in parallel +- T008-T012: All formatters can be implemented in parallel (different files) +- T014-T018: All foundational unit tests can run in parallel +- T019, T020: US1 tests can be written in parallel +- T024, T025: US2 tests can be written in parallel +- After Phase 2, all user stories (Phase 3-7) can proceed in parallel + +--- + +## Parallel Example: Foundational Phase + +```text +# Launch all formatters in parallel (different files, no dependencies): +Task: T008 "Base formatter protocol in infrahub_sdk/ctl/formatters/base.py" +Task: T009 "Rich table formatter in infrahub_sdk/ctl/formatters/table.py" +Task: T010 "JSON formatter in infrahub_sdk/ctl/formatters/json.py" +Task: T011 "CSV formatter in infrahub_sdk/ctl/formatters/csv.py" +Task: T012 "YAML formatter in infrahub_sdk/ctl/formatters/yaml.py" + +# Launch all formatter tests in parallel: +Task: T015 "Table formatter tests in 
tests/unit/ctl/formatters/test_table.py" +Task: T016 "JSON formatter tests in tests/unit/ctl/formatters/test_json.py" +Task: T017 "CSV formatter tests in tests/unit/ctl/formatters/test_csv.py" +Task: T018 "YAML formatter tests in tests/unit/ctl/formatters/test_yaml.py" +``` + +--- + +## Implementation Strategy + +### MVP First (User Story 1 Only) + +1. Complete Phase 1: Setup (T001-T004) +2. Complete Phase 2: Foundational (T005-T018) +3. Complete Phase 3: User Story 1 - Query (T019-T023) +4. **STOP and VALIDATE**: `infrahub get ` works with all output formats +5. Demo/review if ready + +### Incremental Delivery + +1. Setup + Foundational → CLI skeleton with formatters ready +2. Add US1 (Query) → MVP: read-only data access +3. Add US2 (Create) → Users can populate data +4. Add US3 (Update) → Full data management +5. Add US4 (Delete) → Complete CRUD lifecycle +6. Add US5 (Schema) → Self-service discovery +7. Polish → Production-ready + +### Parallel Agent Strategy + +With multiple agents: + +1. Complete Setup + Foundational together +2. Once Foundational is done, dispatch in parallel: + - Agent A: US1 (Query) + US5 (Schema) — both read-only + - Agent B: US2 (Create) + US3 (Update) — both write operations + - Agent C: US4 (Delete) — standalone +3. All stories integrate independently via enduser_commands.py + +--- + +## Notes + +- [P] tasks = different files, no dependencies +- [Story] label maps task to specific user story for traceability +- Each user story is independently completable and testable +- Unit tests mock the SDK client; integration tests hit a real Infrahub instance +- Commit after each phase or logical task group +- Stop at any checkpoint to validate independently diff --git a/tests/integration/test_enduser_cli.py b/tests/integration/test_enduser_cli.py new file mode 100644 index 00000000..3648a695 --- /dev/null +++ b/tests/integration/test_enduser_cli.py @@ -0,0 +1,237 @@ +"""Integration tests for the ``infrahub`` end-user CLI. 
+ +Requires a running Infrahub instance with the TestingAnimal schema loaded. +Uses the same ``TestInfrahubDockerClient`` + ``SchemaAnimal`` fixtures as +the existing ``test_infrahubctl.py`` integration tests. +""" + +from __future__ import annotations + +import json +import os +from typing import TYPE_CHECKING + +import pytest +import yaml +from typer.testing import CliRunner + +from infrahub_sdk.ctl import config +from infrahub_sdk.ctl.enduser_cli import app +from infrahub_sdk.ctl.parameters import load_configuration +from infrahub_sdk.testing.docker import TestInfrahubDockerClient +from infrahub_sdk.testing.schemas.animal import SchemaAnimal + +if TYPE_CHECKING: + from collections.abc import Generator + + from infrahub_sdk import InfrahubClient + from infrahub_sdk.node import InfrahubNode + +runner = CliRunner() + + +class TestEnduserCli(TestInfrahubDockerClient, SchemaAnimal): + """Integration tests for the ``infrahub`` CLI against a live Infrahub instance.""" + + @pytest.fixture(scope="class") + async def base_dataset( + self, + client: InfrahubClient, + load_schema: None, + person_liam: InfrahubNode, + person_ethan: InfrahubNode, + person_sophia: InfrahubNode, + cat_luna: InfrahubNode, + cat_bella: InfrahubNode, + dog_daisy: InfrahubNode, + dog_rocky: InfrahubNode, + ctl_client_config: Generator[None, None, None], + ) -> None: + """Ensure schema and test data are loaded before running tests.""" + + @pytest.fixture(scope="class") + def ctl_client_config(self, client: InfrahubClient) -> Generator[None, None, None]: + """Configure the CLI to talk to the test Infrahub instance.""" + load_configuration(value="infrahubctl.toml") + assert config.SETTINGS._settings + config.SETTINGS._settings.server_address = client.config.address + original_username = os.environ.get("INFRAHUB_USERNAME") + original_password = os.environ.get("INFRAHUB_PASSWORD") + if client.config.username and client.config.password: + os.environ["INFRAHUB_USERNAME"] = client.config.username + 
os.environ["INFRAHUB_PASSWORD"] = client.config.password + yield + if original_username: + os.environ["INFRAHUB_USERNAME"] = original_username + if original_password: + os.environ["INFRAHUB_PASSWORD"] = original_password + + # ------------------------------------------------------------------ + # infrahub --version + # ------------------------------------------------------------------ + + def test_version(self) -> None: + """Verify the --version flag works without a server.""" + result = runner.invoke(app, ["--version"]) + assert result.exit_code == 0 + assert "infrahub v" in result.stdout + + # ------------------------------------------------------------------ + # infrahub schema (US5) + # ------------------------------------------------------------------ + + def test_schema_list(self, base_dataset: None) -> None: + """List schema kinds and verify TestingPerson is present.""" + result = runner.invoke(app, ["schema", "list"]) + assert result.exit_code == 0 + assert "TestingPerson" in result.stdout + + def test_schema_list_with_filter(self, base_dataset: None) -> None: + """Filter schema list by substring.""" + result = runner.invoke(app, ["schema", "list", "--filter", "Dog"]) + assert result.exit_code == 0 + assert "TestingDog" in result.stdout + assert "TestingCat" not in result.stdout + + def test_schema_show(self, base_dataset: None) -> None: + """Show details of a schema kind including attributes and relationships.""" + result = runner.invoke(app, ["schema", "show", "TestingPerson"]) + assert result.exit_code == 0 + assert "TestingPerson" in result.stdout + assert "name" in result.stdout + assert "height" in result.stdout + assert "animals" in result.stdout + + # ------------------------------------------------------------------ + # infrahub get (US1) + # ------------------------------------------------------------------ + + def test_get_list_table(self, base_dataset: None) -> None: + """Query all persons and verify table output contains known names.""" + 
result = runner.invoke(app, ["get", "TestingPerson"]) + assert result.exit_code == 0 + assert "Ethan Carter" in result.stdout + assert "Liam Walker" in result.stdout + + def test_get_list_json(self, base_dataset: None) -> None: + """Query all persons with JSON output and verify valid JSON array.""" + result = runner.invoke(app, ["get", "TestingPerson", "--output", "json"]) + assert result.exit_code == 0 + data = json.loads(result.stdout) + assert isinstance(data, list) + assert len(data) >= 3 + names = [item.get("name", "") for item in data] + assert "Ethan Carter" in names + + def test_get_list_csv(self, base_dataset: None) -> None: + """Query all persons with CSV output.""" + result = runner.invoke(app, ["get", "TestingPerson", "--output", "csv"]) + assert result.exit_code == 0 + assert "name" in result.stdout + assert "Ethan Carter" in result.stdout + + def test_get_list_yaml(self, base_dataset: None) -> None: + """Query all persons with YAML output in Infrahub object format.""" + result = runner.invoke(app, ["get", "TestingPerson", "--output", "yaml"]) + assert result.exit_code == 0 + doc = yaml.safe_load(result.stdout) + assert doc["apiVersion"] == "infrahub.app/v1" + assert doc["kind"] == "Object" + assert doc["spec"]["kind"] == "TestingPerson" + assert isinstance(doc["spec"]["data"], list) + names = [item.get("name", "") for item in doc["spec"]["data"]] + assert "Ethan Carter" in names + + def test_get_list_with_filter(self, base_dataset: None) -> None: + """Query persons filtered by name.""" + result = runner.invoke(app, ["get", "TestingPerson", "--filter", "name__value=Liam Walker", "--output", "json"]) + assert result.exit_code == 0 + data = json.loads(result.stdout) + assert len(data) == 1 + assert data[0]["name"] == "Liam Walker" + + def test_get_list_with_limit(self, base_dataset: None) -> None: + """Query persons with a limit on results.""" + result = runner.invoke(app, ["get", "TestingPerson", "--limit", "1", "--output", "json"]) + assert 
result.exit_code == 0 + data = json.loads(result.stdout) + assert len(data) == 1 + + def test_get_detail(self, base_dataset: None) -> None: + """Get detail view of a single person by display name.""" + result = runner.invoke(app, ["get", "TestingPerson", "Ethan Carter"]) + assert result.exit_code == 0 + assert "Ethan Carter" in result.stdout + assert "185" in result.stdout + + def test_get_detail_json(self, base_dataset: None) -> None: + """Get detail view in JSON format.""" + result = runner.invoke(app, ["get", "TestingPerson", "Ethan Carter", "--output", "json"]) + assert result.exit_code == 0 + data = json.loads(result.stdout) + assert data["kind"] == "TestingPerson" + assert data["display_label"] + + def test_get_invalid_kind(self, base_dataset: None) -> None: + """Querying an invalid kind returns an error.""" + result = runner.invoke(app, ["get", "NonExistentKind"]) + assert result.exit_code != 0 + + # ------------------------------------------------------------------ + # infrahub create (US2) + # ------------------------------------------------------------------ + + async def test_create_inline(self, base_dataset: None, client: InfrahubClient) -> None: + """Create a person using inline --set flags.""" + result = runner.invoke( + app, + ["create", "TestingPerson", "--set", "name=Integration Test Person", "--set", "height=190"], + ) + assert result.exit_code == 0 + assert "Created" in result.stdout + assert "Integration Test Person" in result.stdout + + # Verify via SDK + node = await client.get(kind="TestingPerson", id="Integration Test Person") + assert node.name.value == "Integration Test Person" # type: ignore[union-attr] + assert node.height.value == 190 # type: ignore[union-attr] + + def test_create_missing_args(self, base_dataset: None) -> None: + """Create without --set or --file fails.""" + result = runner.invoke(app, ["create", "TestingPerson"]) + assert result.exit_code != 0 + + # ------------------------------------------------------------------ + # 
infrahub update (US3) + # ------------------------------------------------------------------ + + async def test_update_inline(self, base_dataset: None, client: InfrahubClient) -> None: + """Update a person's height using --set.""" + result = runner.invoke( + app, + ["update", "TestingPerson", "Sophia Walker", "--set", "height=175"], + ) + assert result.exit_code == 0 + assert "Updated" in result.stdout + + # Verify via SDK + node = await client.get(kind="TestingPerson", id="Sophia Walker") + assert node.height.value == 175 # type: ignore[union-attr] + + # ------------------------------------------------------------------ + # infrahub delete (US4) + # ------------------------------------------------------------------ + + async def test_delete_with_yes(self, base_dataset: None, client: InfrahubClient) -> None: + """Delete a person using --yes to skip confirmation.""" + # Create a throwaway object + obj = await client.create(kind="TestingPerson", name="Delete Me", height=100) + await obj.save() + + result = runner.invoke(app, ["delete", "TestingPerson", "Delete Me", "--yes"]) + assert result.exit_code == 0 + assert "Deleted" in result.stdout + + # Verify deleted + node = await client.get(kind="TestingPerson", id="Delete Me", raise_when_missing=False) + assert node is None diff --git a/tests/unit/ctl/commands/__init__.py b/tests/unit/ctl/commands/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/ctl/commands/test_create.py b/tests/unit/ctl/commands/test_create.py new file mode 100644 index 00000000..9013864a --- /dev/null +++ b/tests/unit/ctl/commands/test_create.py @@ -0,0 +1,28 @@ +"""Unit tests for the ``infrahub create`` end-user CLI command.""" + +from __future__ import annotations + +from typer.testing import CliRunner + +from infrahub_sdk.ctl.enduser_cli import app + +runner = CliRunner() + + +def test_create_help() -> None: + """``create --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["create", 
"--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +def test_create_mutual_exclusivity() -> None: + """Passing both --set and --file exits with a non-zero code.""" + result = runner.invoke(app, ["create", "InfraDevice", "--set", "name=router1", "--file", "objects.yml"]) + assert result.exit_code != 0 + + +def test_create_no_args() -> None: + """Omitting both --set and --file exits with a non-zero code.""" + result = runner.invoke(app, ["create", "InfraDevice"]) + assert result.exit_code != 0 diff --git a/tests/unit/ctl/commands/test_delete.py b/tests/unit/ctl/commands/test_delete.py new file mode 100644 index 00000000..ca257f23 --- /dev/null +++ b/tests/unit/ctl/commands/test_delete.py @@ -0,0 +1,16 @@ +"""Unit tests for the ``infrahub delete`` end-user CLI command.""" + +from __future__ import annotations + +from typer.testing import CliRunner + +from infrahub_sdk.ctl.enduser_cli import app + +runner = CliRunner() + + +def test_delete_help() -> None: + """``delete --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["delete", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout diff --git a/tests/unit/ctl/commands/test_get.py b/tests/unit/ctl/commands/test_get.py new file mode 100644 index 00000000..4b7e4975 --- /dev/null +++ b/tests/unit/ctl/commands/test_get.py @@ -0,0 +1,113 @@ +"""Unit tests for the ``infrahub get`` end-user CLI command.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from typer.testing import CliRunner + +from infrahub_sdk.ctl.enduser_cli import app + +runner = CliRunner() + + +def test_get_help() -> None: + """``get --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["get", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +def 
test_get_list_mode() -> None: + """List mode calls ``client.filters`` and prints node data.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = [] + + mock_node = MagicMock() + mock_node.id = "abc-123" + mock_node.display_label = "test-device" + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.filters = AsyncMock(return_value=[mock_node]) + + mock_formatter = MagicMock() + mock_formatter.format_list.return_value = "device-list-output" + + with ( + patch("infrahub_sdk.ctl.commands.get.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.get.detect_output_format", return_value="json"), + patch("infrahub_sdk.ctl.commands.get.get_formatter", return_value=mock_formatter), + ): + result = runner.invoke(app, ["get", "InfraDevice"]) + + assert result.exit_code == 0 + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) + mock_client.filters.assert_awaited_once() + mock_formatter.format_list.assert_called_once_with([mock_node], mock_schema) + + +def test_get_detail_mode() -> None: + """Detail mode calls ``client.get`` when an identifier is supplied.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name"] + mock_schema.relationship_names = [] + + mock_node = MagicMock() + mock_node.id = "abc-123" + mock_node.display_label = "test-device" + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.get = AsyncMock(return_value=mock_node) + + mock_formatter = MagicMock() + mock_formatter.format_detail.return_value = '{"id": "abc-123"}' + + with ( + patch("infrahub_sdk.ctl.commands.get.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.get.detect_output_format", return_value="json"), + patch("infrahub_sdk.ctl.commands.get.get_formatter", 
return_value=mock_formatter), + ): + result = runner.invoke(app, ["get", "InfraDevice", "abc-123"]) + + assert result.exit_code == 0 + mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="abc-123") + mock_formatter.format_detail.assert_called_once_with(mock_node, mock_schema) + + +@pytest.mark.parametrize( + "extra_args", + [ + ["--branch", "my-branch"], + ["--limit", "10"], + ["--offset", "5"], + ["--filter", "name__value=router1"], + ], +) +def test_get_list_mode_with_options(extra_args: list[str]) -> None: + """List mode accepts optional flags without error.""" + mock_schema = MagicMock() + mock_schema.attribute_names = [] + mock_schema.relationship_names = [] + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.filters = AsyncMock(return_value=[]) + + mock_formatter = MagicMock() + mock_formatter.format_list.return_value = "[]" + + with ( + patch("infrahub_sdk.ctl.commands.get.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.get.detect_output_format", return_value="json"), + patch("infrahub_sdk.ctl.commands.get.get_formatter", return_value=mock_formatter), + ): + result = runner.invoke(app, ["get", "InfraDevice", *extra_args]) + + assert result.exit_code == 0 diff --git a/tests/unit/ctl/commands/test_schema.py b/tests/unit/ctl/commands/test_schema.py new file mode 100644 index 00000000..51c28408 --- /dev/null +++ b/tests/unit/ctl/commands/test_schema.py @@ -0,0 +1,23 @@ +"""Unit tests for the ``infrahub schema`` end-user CLI subcommand group.""" + +from __future__ import annotations + +from typer.testing import CliRunner + +from infrahub_sdk.ctl.enduser_cli import app + +runner = CliRunner() + + +def test_schema_list_help() -> None: + """``schema list --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["schema", "list", "--help"]) + assert result.exit_code == 0 + assert "Usage" in result.stdout + + 
+def test_schema_show_help() -> None: + """``schema show --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["schema", "show", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout diff --git a/tests/unit/ctl/commands/test_update.py b/tests/unit/ctl/commands/test_update.py new file mode 100644 index 00000000..0480f265 --- /dev/null +++ b/tests/unit/ctl/commands/test_update.py @@ -0,0 +1,31 @@ +"""Unit tests for the ``infrahub update`` end-user CLI command.""" + +from __future__ import annotations + +from typer.testing import CliRunner + +from infrahub_sdk.ctl.enduser_cli import app + +runner = CliRunner() + + +def test_update_help() -> None: + """``update --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["update", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +def test_update_mutual_exclusivity() -> None: + """Passing both --set and --file exits with a non-zero code.""" + result = runner.invoke( + app, + ["update", "InfraDevice", "abc-123", "--set", "name=router1", "--file", "objects.yml"], + ) + assert result.exit_code != 0 + + +def test_update_no_args() -> None: + """Omitting both --set and --file exits with a non-zero code.""" + result = runner.invoke(app, ["update", "InfraDevice", "abc-123"]) + assert result.exit_code != 0 diff --git a/tests/unit/ctl/formatters/__init__.py b/tests/unit/ctl/formatters/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/ctl/formatters/test_csv.py b/tests/unit/ctl/formatters/test_csv.py new file mode 100644 index 00000000..ae1960f6 --- /dev/null +++ b/tests/unit/ctl/formatters/test_csv.py @@ -0,0 +1,247 @@ +"""Unit tests for infrahub_sdk.ctl.formatters.csv (CsvFormatter).""" + +from __future__ import annotations + +import csv +import io +from unittest.mock import MagicMock + +from infrahub_sdk.ctl.formatters.csv 
import CsvFormatter + + +def _make_mock_schema( + attr_names: list[str], + rel_names: list[str], + kind: str = "TestKind", +) -> MagicMock: + """Build a minimal schema mock with the given attribute and relationship names. + + Args: + attr_names: List of attribute names. + rel_names: List of relationship names. + kind: Schema kind string. + + Returns: + MagicMock configured to behave like a MainSchemaTypesAPI object. + """ + schema = MagicMock() + schema.kind = kind + schema.attribute_names = attr_names + schema.relationship_names = rel_names + for _name in rel_names: + rel = MagicMock() + rel.cardinality = "one" + schema.get_relationship.return_value = rel + return schema + + +def _make_mock_node( + attr_values: dict[str, object], + rel_values: dict[str, str], + node_id: str = "test-id", + display_label: str = "Test", +) -> MagicMock: + """Build a minimal node mock with the given attribute and relationship values. + + Args: + attr_values: Mapping of attribute name to value. + rel_values: Mapping of relationship name to display_label string. + node_id: The node ID. + display_label: The display label for the node. + + Returns: + MagicMock configured to behave like an InfrahubNode object. + """ + node = MagicMock() + node.id = node_id + node.display_label = display_label + for attr_name, value in attr_values.items(): + attr = MagicMock() + attr.value = value + setattr(node, attr_name, attr) + for rel_name, label in rel_values.items(): + rel = MagicMock() + rel.display_label = label + rel.id = f"{rel_name}-id" + setattr(node, rel_name, rel) + return node + + +def _parse_csv(text: str) -> list[dict[str, str]]: + """Parse a CSV string into a list of row dicts. + + Args: + text: CSV-formatted string. + + Returns: + List of dicts keyed by header row values. 
+ """ + return list(csv.DictReader(io.StringIO(text))) + + +class TestCsvFormatterFormatList: + """Tests for CsvFormatter.format_list.""" + + def test_format_list_returns_string(self) -> None: + """Test that format_list returns a string.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + assert isinstance(result, str) + + def test_format_list_has_header_row_with_attribute_name(self) -> None: + """Test that the first row contains attribute column headers.""" + schema = _make_mock_schema(["name", "status"], []) + node = _make_mock_node({"name": "router1", "status": "active"}, {}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + rows = _parse_csv(result) + assert "name" in rows[0] + assert "status" in rows[0] + + def test_format_list_has_header_row_with_relationship_name(self) -> None: + """Test that the first row contains relationship column headers.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + rows = _parse_csv(result) + assert "site" in rows[0] + + def test_format_list_data_row_contains_attribute_value(self) -> None: + """Test that data rows contain the node attribute value.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + rows = _parse_csv(result) + assert rows[0]["name"] == "router1" + + def test_format_list_data_row_contains_relationship_value(self) -> None: + """Test that data rows contain the relationship display label.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + rows 
= _parse_csv(result) + assert rows[0]["site"] == "DC1" + + def test_format_list_one_data_row_per_node(self) -> None: + """Test that format_list produces one data row per node.""" + schema = _make_mock_schema(["name"], []) + node1 = _make_mock_node({"name": "router1"}, {}, node_id="id-1") + node2 = _make_mock_node({"name": "router2"}, {}, node_id="id-2") + formatter = CsvFormatter() + + result = formatter.format_list([node1, node2], schema) + + rows = _parse_csv(result) + assert len(rows) == 2 + + def test_format_list_empty_nodes_returns_header_only(self) -> None: + """Test that format_list with no nodes returns only the header row.""" + schema = _make_mock_schema(["name"], []) + formatter = CsvFormatter() + + result = formatter.format_list([], schema) + + rows = _parse_csv(result) + assert rows == [] + assert "name" in result + + +class TestCsvFormatterFormatDetail: + """Tests for CsvFormatter.format_detail.""" + + def test_format_detail_returns_string(self) -> None: + """Test that format_detail returns a string.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + assert isinstance(result, str) + + def test_format_detail_has_field_value_headers(self) -> None: + """Test that format_detail output has field and value column headers.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + assert "field" in rows[0] + assert "value" in rows[0] + + def test_format_detail_contains_id_row(self) -> None: + """Test that format_detail includes a row for the node id.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, node_id="abc-123") + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + id_row = 
next(r for r in rows if r["field"] == "id") + assert id_row["value"] == "abc-123" + + def test_format_detail_contains_display_label_row(self) -> None: + """Test that format_detail includes a row for display_label.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, display_label="Router One") + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + label_row = next(r for r in rows if r["field"] == "display_label") + assert label_row["value"] == "Router One" + + def test_format_detail_contains_kind_row(self) -> None: + """Test that format_detail includes a row for the schema kind.""" + schema = _make_mock_schema(["name"], [], kind="InfraDevice") + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + kind_row = next(r for r in rows if r["field"] == "kind") + assert kind_row["value"] == "InfraDevice" + + def test_format_detail_contains_attribute_row(self) -> None: + """Test that format_detail includes a row for each attribute.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + name_row = next(r for r in rows if r["field"] == "name") + assert name_row["value"] == "router1" + + def test_format_detail_contains_relationship_row(self) -> None: + """Test that format_detail includes a row for each relationship.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + site_row = next(r for r in rows if r["field"] == "site") + assert site_row["value"] == "DC1" diff --git a/tests/unit/ctl/formatters/test_json.py b/tests/unit/ctl/formatters/test_json.py 
new file mode 100644 index 00000000..b7cc1caa --- /dev/null +++ b/tests/unit/ctl/formatters/test_json.py @@ -0,0 +1,201 @@ +"""Unit tests for infrahub_sdk.ctl.formatters.json (JsonFormatter).""" + +from __future__ import annotations + +import json +from typing import TYPE_CHECKING, cast +from unittest.mock import MagicMock + +from infrahub_sdk.ctl.formatters.json import JsonFormatter + +if TYPE_CHECKING: + from infrahub_sdk.node import InfrahubNode + + +def _make_mock_schema( + attr_names: list[str], + rel_names: list[str], + kind: str = "TestKind", +) -> MagicMock: + """Build a minimal schema mock with the given attribute and relationship names. + + Args: + attr_names: List of attribute names. + rel_names: List of relationship names. + kind: Schema kind string. + + Returns: + MagicMock configured to behave like a MainSchemaTypesAPI object. + """ + schema = MagicMock() + schema.kind = kind + schema.attribute_names = attr_names + schema.relationship_names = rel_names + for _name in rel_names: + rel = MagicMock() + rel.cardinality = "one" + schema.get_relationship.return_value = rel + return schema + + +def _make_mock_node( + attr_values: dict[str, object], + rel_values: dict[str, str], + node_id: str = "test-id", + display_label: str = "Test", +) -> MagicMock: + """Build a minimal node mock with the given attribute and relationship values. + + Args: + attr_values: Mapping of attribute name to value. + rel_values: Mapping of relationship name to display_label string. + node_id: The node ID. + display_label: The display label for the node. + + Returns: + MagicMock configured to behave like an InfrahubNode object. 
+ """ + node = MagicMock() + node.id = node_id + node.display_label = display_label + for attr_name, value in attr_values.items(): + attr = MagicMock() + attr.value = value + setattr(node, attr_name, attr) + for rel_name, label in rel_values.items(): + rel = MagicMock() + rel.display_label = label + rel.id = f"{rel_name}-id" + setattr(node, rel_name, rel) + return node + + +class TestJsonFormatterFormatList: + """Tests for JsonFormatter.format_list.""" + + def test_format_list_returns_valid_json(self) -> None: + """Test that format_list output is valid JSON.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_list([node], schema) + + parsed = json.loads(result) + assert isinstance(parsed, list) + + def test_format_list_contains_attribute_value(self) -> None: + """Test that format_list includes the node attribute value.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_list([node], schema) + + parsed = json.loads(result) + assert parsed[0]["name"] == "router1" + + def test_format_list_multiple_nodes(self) -> None: + """Test that format_list produces one array entry per node.""" + schema = _make_mock_schema(["name"], []) + nodes = [ + _make_mock_node({"name": "router1"}, {}, node_id="id-1"), + _make_mock_node({"name": "router2"}, {}, node_id="id-2"), + ] + formatter = JsonFormatter() + + result = formatter.format_list(cast("list[InfrahubNode]", nodes), schema) + + parsed = json.loads(result) + assert len(parsed) == 2 + + def test_format_list_empty_list_returns_json_array(self) -> None: + """Test that format_list with an empty node list returns a JSON empty array.""" + schema = _make_mock_schema(["name"], []) + formatter = JsonFormatter() + + result = formatter.format_list([], schema) + + assert result.strip() == "[]" + + def 
test_format_list_includes_relationship_value(self) -> None: + """Test that format_list includes relationship display labels.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = JsonFormatter() + + result = formatter.format_list([node], schema) + + parsed = json.loads(result) + assert parsed[0]["site"] == "DC1" + + +class TestJsonFormatterFormatDetail: + """Tests for JsonFormatter.format_detail.""" + + def test_format_detail_returns_valid_json(self) -> None: + """Test that format_detail output is valid JSON.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert isinstance(parsed, dict) + + def test_format_detail_contains_id(self) -> None: + """Test that format_detail includes the node id field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, node_id="abc-123") + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert parsed["id"] == "abc-123" + + def test_format_detail_contains_display_label(self) -> None: + """Test that format_detail includes the display_label metadata field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, display_label="Router One") + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert parsed["display_label"] == "Router One" + + def test_format_detail_contains_kind(self) -> None: + """Test that format_detail includes the kind metadata field from schema.""" + schema = _make_mock_schema(["name"], [], kind="InfraDevice") + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert 
parsed["kind"] == "InfraDevice" + + def test_format_detail_contains_attribute_value(self) -> None: + """Test that format_detail includes attribute values nested under their name.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert parsed["name"]["value"] == "router1" + + def test_format_detail_contains_relationship(self) -> None: + """Test that format_detail includes relationship data.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert "site" in parsed + assert parsed["site"]["display_label"] == "DC1" diff --git a/tests/unit/ctl/formatters/test_table.py b/tests/unit/ctl/formatters/test_table.py new file mode 100644 index 00000000..967e9b23 --- /dev/null +++ b/tests/unit/ctl/formatters/test_table.py @@ -0,0 +1,210 @@ +"""Unit tests for infrahub_sdk.ctl.formatters.table (TableFormatter).""" + +from __future__ import annotations + +from unittest.mock import MagicMock + +from infrahub_sdk.ctl.formatters.table import TableFormatter + + +def _make_mock_schema( + attr_names: list[str], + rel_names: list[str], + kind: str = "TestKind", +) -> MagicMock: + """Build a minimal schema mock with the given attribute and relationship names. + + Args: + attr_names: List of attribute names. + rel_names: List of relationship names. + kind: Schema kind string. + + Returns: + MagicMock configured to behave like a MainSchemaTypesAPI object. 
+ """ + schema = MagicMock() + schema.kind = kind + schema.attribute_names = attr_names + schema.relationship_names = rel_names + for _name in rel_names: + rel = MagicMock() + rel.cardinality = "one" + schema.get_relationship.return_value = rel + return schema + + +def _make_mock_node( + attr_values: dict[str, object], + rel_values: dict[str, str], + node_id: str = "test-id", + display_label: str = "Test", +) -> MagicMock: + """Build a minimal node mock with the given attribute and relationship values. + + Args: + attr_values: Mapping of attribute name to value. + rel_values: Mapping of relationship name to display_label string. + node_id: The node ID. + display_label: The display label for the node. + + Returns: + MagicMock configured to behave like an InfrahubNode object. + """ + node = MagicMock() + node.id = node_id + node.display_label = display_label + for attr_name, value in attr_values.items(): + attr = MagicMock() + attr.value = value + setattr(node, attr_name, attr) + for rel_name, label in rel_values.items(): + rel = MagicMock() + rel.display_label = label + rel.id = f"{rel_name}-id" + setattr(node, rel_name, rel) + return node + + +class TestTableFormatterFormatList: + """Tests for TableFormatter.format_list.""" + + def test_format_list_returns_string(self) -> None: + """Test that format_list returns a string.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert isinstance(result, str) + + def test_format_list_contains_attribute_column_header(self) -> None: + """Test that format_list output includes attribute names as column headers.""" + schema = _make_mock_schema(["name", "status"], []) + node = _make_mock_node({"name": "router1", "status": "active"}, {}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert "name" in result + assert "status" in result + + def 
test_format_list_contains_relationship_column_header(self) -> None: + """Test that format_list output includes relationship names as column headers.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert "site" in result + + def test_format_list_contains_attribute_value(self) -> None: + """Test that format_list output includes node attribute values.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert "router1" in result + + def test_format_list_contains_relationship_value(self) -> None: + """Test that format_list output includes relationship display labels.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert "DC1" in result + + def test_format_list_multiple_nodes(self) -> None: + """Test that format_list renders all nodes.""" + schema = _make_mock_schema(["name"], []) + node1 = _make_mock_node({"name": "router1"}, {}, node_id="id-1") + node2 = _make_mock_node({"name": "router2"}, {}, node_id="id-2") + formatter = TableFormatter() + + result = formatter.format_list([node1, node2], schema) + + assert "router1" in result + assert "router2" in result + + +class TestTableFormatterFormatDetail: + """Tests for TableFormatter.format_detail.""" + + def test_format_detail_returns_string(self) -> None: + """Test that format_detail returns a string.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert isinstance(result, str) + + def test_format_detail_contains_field_column_header(self) -> None: + """Test 
that format_detail output includes the Field column header.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "Field" in result + + def test_format_detail_contains_value_column_header(self) -> None: + """Test that format_detail output includes the Value column header.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "Value" in result + + def test_format_detail_contains_id_field(self) -> None: + """Test that format_detail output includes the id metadata field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, node_id="abc-123") + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "id" in result + assert "abc-123" in result + + def test_format_detail_contains_display_label_field(self) -> None: + """Test that format_detail output includes the display_label metadata field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, display_label="Router One") + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "display_label" in result + assert "Router One" in result + + def test_format_detail_contains_attribute_name_and_value(self) -> None: + """Test that format_detail includes attribute field names and values.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "name" in result + assert "router1" in result + + def test_format_detail_contains_relationship_name_and_value(self) -> None: + """Test that format_detail includes relationship field names and display labels.""" + schema = 
_make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "site" in result + assert "DC1" in result diff --git a/tests/unit/ctl/formatters/test_yaml.py b/tests/unit/ctl/formatters/test_yaml.py new file mode 100644 index 00000000..d64b34af --- /dev/null +++ b/tests/unit/ctl/formatters/test_yaml.py @@ -0,0 +1,206 @@ +"""Unit tests for infrahub_sdk.ctl.formatters.yaml (YamlFormatter).""" + +from __future__ import annotations + +from unittest.mock import MagicMock + +import yaml # pyright: ignore[reportMissingModuleSource] + +from infrahub_sdk.ctl.formatters.yaml import YamlFormatter + + +def _make_mock_schema( + attr_names: list[str], + rel_names: list[str], + kind: str = "TestKind", +) -> MagicMock: + """Build a minimal schema mock with the given attribute and relationship names. + + Args: + attr_names: List of attribute names. + rel_names: List of relationship names. + kind: Schema kind string. + + Returns: + MagicMock configured to behave like a MainSchemaTypesAPI object. + """ + schema = MagicMock() + schema.kind = kind + schema.attribute_names = attr_names + schema.relationship_names = rel_names + for _name in rel_names: + rel = MagicMock() + rel.cardinality = "one" + schema.get_relationship.return_value = rel + return schema + + +def _make_mock_node( + attr_values: dict[str, object], + rel_values: dict[str, str], + node_id: str = "test-id", + display_label: str = "Test", +) -> MagicMock: + """Build a minimal node mock with the given attribute and relationship values. + + Args: + attr_values: Mapping of attribute name to value. + rel_values: Mapping of relationship name to display_label string. + node_id: The node ID. + display_label: The display label for the node. + + Returns: + MagicMock configured to behave like an InfrahubNode object. 
+ """ + node = MagicMock() + node.id = node_id + node.display_label = display_label + for attr_name, value in attr_values.items(): + attr = MagicMock() + attr.value = value + setattr(node, attr_name, attr) + for rel_name, label in rel_values.items(): + rel = MagicMock() + rel.display_label = label + rel.id = f"{rel_name}-id" + setattr(node, rel_name, rel) + return node + + +class TestYamlFormatterFormatList: + """Tests for YamlFormatter.format_list.""" + + def test_format_list_produces_valid_yaml(self) -> None: + """Test that format_list output can be parsed as valid YAML.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert isinstance(parsed, dict) + + def test_format_list_contains_api_version(self) -> None: + """Test that format_list output contains the infrahub apiVersion field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert parsed["apiVersion"] == "infrahub.app/v1" + + def test_format_list_contains_kind_object(self) -> None: + """Test that format_list output has kind set to Object.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert parsed["kind"] == "Object" + + def test_format_list_spec_kind_matches_schema(self) -> None: + """Test that spec.kind matches the schema kind.""" + schema = _make_mock_schema(["name"], [], kind="InfraDevice") + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["kind"] == "InfraDevice" + + def 
test_format_list_spec_data_is_list(self) -> None: + """Test that spec.data is a list.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert isinstance(parsed["spec"]["data"], list) + + def test_format_list_data_contains_attribute_value(self) -> None: + """Test that spec.data entries contain the attribute value.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["name"] == "router1" + + def test_format_list_data_one_entry_per_node(self) -> None: + """Test that spec.data contains one entry per node.""" + schema = _make_mock_schema(["name"], []) + node1 = _make_mock_node({"name": "router1"}, {}, node_id="id-1") + node2 = _make_mock_node({"name": "router2"}, {}, node_id="id-2") + formatter = YamlFormatter() + + result = formatter.format_list([node1, node2], schema) + + parsed = yaml.safe_load(result) + assert len(parsed["spec"]["data"]) == 2 + + def test_format_list_starts_with_document_separator(self) -> None: + """Test that the YAML output starts with the --- document separator.""" + schema = _make_mock_schema(["name"], []) + formatter = YamlFormatter() + + result = formatter.format_list([], schema) + + assert result.startswith("---") + + +class TestYamlFormatterFormatDetail: + """Tests for YamlFormatter.format_detail.""" + + def test_format_detail_produces_valid_yaml(self) -> None: + """Test that format_detail output can be parsed as valid YAML.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert isinstance(parsed, dict) + + def 
test_format_detail_spec_data_has_single_entry(self) -> None: + """Test that format_detail produces exactly one entry in spec.data.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert len(parsed["spec"]["data"]) == 1 + + def test_format_detail_data_entry_contains_attribute(self) -> None: + """Test that the single spec.data entry contains the attribute value.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["name"] == "router1" + + def test_format_detail_relationship_uses_display_label(self) -> None: + """Test that relationship values are stored as display_label strings.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["site"] == "DC1" diff --git a/tests/unit/ctl/test_parsers.py b/tests/unit/ctl/test_parsers.py new file mode 100644 index 00000000..1a056716 --- /dev/null +++ b/tests/unit/ctl/test_parsers.py @@ -0,0 +1,128 @@ +"""Unit tests for infrahub_sdk.ctl.parsers.""" + +from __future__ import annotations + +import pytest +import typer + +from infrahub_sdk.ctl.parsers import parse_filter_args, parse_set_args, validate_set_fields + + +class TestParseSetArgs: + """Tests for parse_set_args.""" + + def test_single_key_value_pair(self) -> None: + """Test parse_set_args with a single valid key=value argument.""" + result = parse_set_args(["name=router1"]) + assert result == {"name": "router1"} + + def test_multiple_key_value_pairs(self) -> None: + """Test parse_set_args with multiple valid 
key=value arguments.""" + result = parse_set_args(["name=router1", "status=active"]) + assert result == {"name": "router1", "status": "active"} + + def test_value_containing_equals_sign(self) -> None: + """Test that only the first = is used as the split point.""" + result = parse_set_args(["description=a=b=c"]) + assert result == {"description": "a=b=c"} + + def test_empty_list(self) -> None: + """Test parse_set_args with an empty list returns an empty dict.""" + result = parse_set_args([]) + assert result == {} + + def test_missing_equals_raises_bad_parameter(self) -> None: + """Test parse_set_args raises BadParameter when = is absent.""" + with pytest.raises(typer.BadParameter, match="Invalid format"): + parse_set_args(["nameonly"]) + + def test_empty_key_raises_bad_parameter(self) -> None: + """Test parse_set_args raises BadParameter when key is empty.""" + with pytest.raises(typer.BadParameter, match="Key must not be empty"): + parse_set_args(["=value"]) + + def test_whitespace_only_key_raises_bad_parameter(self) -> None: + """Test parse_set_args raises BadParameter when key is only whitespace.""" + with pytest.raises(typer.BadParameter, match="Key must not be empty"): + parse_set_args([" =value"]) + + def test_value_can_be_empty_string(self) -> None: + """Test parse_set_args accepts an empty string value.""" + result = parse_set_args(["name="]) + assert result == {"name": ""} + + +class TestParseFilterArgs: + """Tests for parse_filter_args.""" + + def test_single_filter_argument(self) -> None: + """Test parse_filter_args with a single valid filter argument.""" + result = parse_filter_args(["name__value=router1"]) + assert result == {"name__value": "router1"} + + def test_multiple_filter_arguments(self) -> None: + """Test parse_filter_args with multiple valid filter arguments.""" + result = parse_filter_args(["name__value=router1", "status__value=active"]) + assert result == {"name__value": "router1", "status__value": "active"} + + def test_empty_list(self) -> 
None: + """Test parse_filter_args with an empty list returns an empty dict.""" + result = parse_filter_args([]) + assert result == {} + + def test_missing_equals_raises_bad_parameter(self) -> None: + """Test parse_filter_args raises BadParameter when = is absent.""" + with pytest.raises(typer.BadParameter, match="Invalid format"): + parse_filter_args(["name__value"]) + + def test_value_containing_equals_sign(self) -> None: + """Test that only the first = splits the filter argument.""" + result = parse_filter_args(["description__value=x=y"]) + assert result == {"description__value": "x=y"} + + +class TestValidateSetFields: + """Tests for validate_set_fields.""" + + def test_all_attribute_fields_valid(self) -> None: + """Test validate_set_fields passes when all keys are valid attribute names.""" + data = {"name": "router1", "status": "active"} + validate_set_fields(data, attribute_names=["name", "status"], relationship_names=[]) + + def test_all_relationship_fields_valid(self) -> None: + """Test validate_set_fields passes when all keys are valid relationship names.""" + data = {"site": "dc1"} + validate_set_fields(data, attribute_names=[], relationship_names=["site"]) + + def test_mixed_attribute_and_relationship_fields_valid(self) -> None: + """Test validate_set_fields passes with a mix of attribute and relationship keys.""" + data = {"name": "router1", "site": "dc1"} + validate_set_fields(data, attribute_names=["name"], relationship_names=["site"]) + + def test_empty_data_passes(self) -> None: + """Test validate_set_fields passes when data is empty.""" + validate_set_fields({}, attribute_names=["name"], relationship_names=["site"]) + + def test_unknown_field_raises_bad_parameter(self) -> None: + """Test validate_set_fields raises BadParameter for an unknown field.""" + data = {"unknown_field": "value"} + with pytest.raises(typer.BadParameter, match="Unknown field"): + validate_set_fields(data, attribute_names=["name"], relationship_names=["site"]) + + def 
test_error_message_lists_invalid_field(self) -> None: + """Test that the error message includes the invalid field name.""" + data = {"bogus": "value"} + with pytest.raises(typer.BadParameter, match="bogus"): + validate_set_fields(data, attribute_names=["name"], relationship_names=[]) + + def test_error_message_lists_valid_fields(self) -> None: + """Test that the error message includes the list of valid fields.""" + data = {"bogus": "value"} + with pytest.raises(typer.BadParameter, match="name"): + validate_set_fields(data, attribute_names=["name"], relationship_names=["site"]) + + def test_multiple_unknown_fields_raises_bad_parameter(self) -> None: + """Test validate_set_fields raises BadParameter listing multiple unknown fields.""" + data = {"bad1": "x", "bad2": "y"} + with pytest.raises(typer.BadParameter, match="bad1"): + validate_set_fields(data, attribute_names=["name"], relationship_names=[]) From 8fba90d654d0f3410ad7ea635ea9db4c3b72d766 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 13:46:51 +0000 Subject: [PATCH 02/32] fix: resolve CI failures in markdown lint, integration tests, and docs - Fix MD060 table column style in plan.md and quickstart.md - Fix MD032 blanks-around-lists in research.md - Fix MD050 strong-style (escaped __init__.py underscores) in tasks.md - Split integration test class to stay under PLR0904 method limit - Make CLI runner tests sync to avoid asyncio.run() nested loop error --- specs/001-end-user-cli/plan.md | 4 +- specs/001-end-user-cli/quickstart.md | 2 +- specs/001-end-user-cli/research.md | 5 +++ specs/001-end-user-cli/tasks.md | 6 +-- tests/integration/test_enduser_cli.py | 53 ++++++++++----------------- 5 files changed, 31 insertions(+), 39 deletions(-) diff --git a/specs/001-end-user-cli/plan.md b/specs/001-end-user-cli/plan.md index c49ea5f9..9f7c31b8 100644 --- a/specs/001-end-user-cli/plan.md +++ b/specs/001-end-user-cli/plan.md @@ -28,7 +28,7 @@ formats including round-trippable Infrahub Object YAML. 
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.* | Principle | Status | Notes | -|-----------|--------|-------| +| --------- | ------ | ----- | | I. Async/Sync Dual Pattern | PASS | CLI commands are async (via AsyncTyper). No new public SDK API surface requiring dual pattern — CLI is async-only consumer. | | II. Type Safety | PASS | All new functions will have type hints. mypy/ty must pass. | | III. Test Discipline | PASS | FR-015 requires unit + integration tests. 70% coverage target. | @@ -103,7 +103,7 @@ testability and reuse across commands. ## Post-Design Constitution Re-Check | Principle | Status | Notes | -|-----------|--------|-------| +| --------- | ------ | ----- | | I. Async/Sync Dual Pattern | PASS | No new public SDK API. CLI is async consumer only. | | II. Type Safety | PASS | All modules typed. No generated code modified. | | III. Test Discipline | PASS | Test structure mirrors source structure. Unit tests mock SDK client. Integration tests hit Infrahub. | diff --git a/specs/001-end-user-cli/quickstart.md b/specs/001-end-user-cli/quickstart.md index 04b8edef..2043a27b 100644 --- a/specs/001-end-user-cli/quickstart.md +++ b/specs/001-end-user-cli/quickstart.md @@ -100,7 +100,7 @@ infrahub delete InfraDevice spine03 --yes ## Output Formats | Format | Flag | Use Case | -|--------|------|----------| +| ------ | ---- | -------- | | Table | `--output table` | Interactive terminal (default) | | JSON | `--output json` | Scripting, piping (default when piped) | | CSV | `--output csv` | Spreadsheet import, data analysis | diff --git a/specs/001-end-user-cli/research.md b/specs/001-end-user-cli/research.md index ea56ebcb..c2373cae 100644 --- a/specs/001-end-user-cli/research.md +++ b/specs/001-end-user-cli/research.md @@ -12,6 +12,7 @@ configuration, authentication, and client initialization code. A second entry po the same package reuses all existing infrastructure. 
**Alternatives considered**: + - Separate Python package: rejected — duplicates config/client code, complicates releases - Subcommand of `infrahubctl`: rejected — user explicitly wants separate `infrahub` command with end-user focus distinct from developer tooling @@ -26,6 +27,7 @@ Rich console output, matching the `infrahubctl` patterns exactly. `initialize_client`, `CONFIG_PARAM`) work with this pattern. **Alternatives considered**: + - Click directly: rejected — less ergonomic, would diverge from existing patterns - Sync-only CLI: rejected — SDK client methods are async-first @@ -40,6 +42,7 @@ parameters. Filter syntax is `attribute__value="x"` or `relationship__id="uuid"` Pagination is handled automatically with `client.pagination_size`. **Key findings**: + - `node.display_label` provides the human-readable name for table display - `node..value` accesses attribute values - `schema.attribute_names` and `schema.relationship_names` enumerate fields @@ -58,6 +61,7 @@ reverse direction needs a serializer that walks node attributes and relationship produce the same dict structure. **Key classes**: + - `InfrahubObjectFileData` — spec model with `kind`, `parameters`, `data` fields - `ObjectFile` — file wrapper with `validate_content()` and `process()` methods - Relationship formats: `ONE_REF`, `MANY_REF`, `ONE_OBJ`, `MANY_OBJ_DICT_LIST` @@ -74,6 +78,7 @@ produce the same dict structure. 
## R6: Create/Update/Delete Implementation **Decision**: Use existing SDK CRUD methods: + - Create: `client.create(kind=kind, data=data)` → `node.save(allow_upsert=True)` - Update: `client.get(kind=kind, id=identifier)` → modify attrs → `node.save()` - Delete: `client.get(kind=kind, id=identifier)` → `node.delete()` diff --git a/specs/001-end-user-cli/tasks.md b/specs/001-end-user-cli/tasks.md index 031c08d6..48e1ac6e 100644 --- a/specs/001-end-user-cli/tasks.md +++ b/specs/001-end-user-cli/tasks.md @@ -27,8 +27,8 @@ - [x] T001 Add `infrahub` entry point to `[project.scripts]` in pyproject.toml pointing to `infrahub_sdk.ctl.enduser_cli:app` - [x] T002 Create CLI entry point module in infrahub_sdk/ctl/enduser_cli.py with AsyncTyper app and error-handling wrapper (matching infrahub_sdk/ctl/cli.py pattern) -- [x] T003 [P] Create commands package with infrahub_sdk/ctl/commands/__init__.py -- [x] T004 [P] Create formatters package with infrahub_sdk/ctl/formatters/__init__.py +- [x] T003 [P] Create commands package with infrahub_sdk/ctl/commands/\_\_init\_\_.py +- [x] T004 [P] Create formatters package with infrahub_sdk/ctl/formatters/\_\_init\_\_.py --- @@ -40,7 +40,7 @@ - [x] T005 Implement `--set` flag parser (parse `key=value` strings into dict) in infrahub_sdk/ctl/parsers.py - [x] T006 Implement `--filter` flag parser (parse `attr__value=x` strings into kwargs dict) in infrahub_sdk/ctl/parsers.py -- [x] T007 Implement output format auto-detection (TTY → table, piped → json) and `OutputFormat` enum in infrahub_sdk/ctl/formatters/__init__.py +- [x] T007 Implement output format auto-detection (TTY → table, piped → json) and `OutputFormat` enum in infrahub_sdk/ctl/formatters/\_\_init\_\_.py - [x] T008 [P] Implement base formatter protocol with `format_list()` and `format_detail()` methods in infrahub_sdk/ctl/formatters/base.py - [x] T009 [P] Implement Rich table formatter (list view: attribute + relationship columns with display names; detail view: key-value pairs) in 
infrahub_sdk/ctl/formatters/table.py - [x] T010 [P] Implement JSON formatter (list and detail mode) in infrahub_sdk/ctl/formatters/json.py diff --git a/tests/integration/test_enduser_cli.py b/tests/integration/test_enduser_cli.py index 3648a695..a01b73d0 100644 --- a/tests/integration/test_enduser_cli.py +++ b/tests/integration/test_enduser_cli.py @@ -30,8 +30,8 @@ runner = CliRunner() -class TestEnduserCli(TestInfrahubDockerClient, SchemaAnimal): - """Integration tests for the ``infrahub`` CLI against a live Infrahub instance.""" +class _EnduserCliBase(TestInfrahubDockerClient, SchemaAnimal): + """Shared fixtures for end-user CLI integration tests.""" @pytest.fixture(scope="class") async def base_dataset( @@ -66,9 +66,9 @@ def ctl_client_config(self, client: InfrahubClient) -> Generator[None, None, Non if original_password: os.environ["INFRAHUB_PASSWORD"] = original_password - # ------------------------------------------------------------------ - # infrahub --version - # ------------------------------------------------------------------ + +class TestEnduserCliRead(_EnduserCliBase): + """Read-only CLI tests: version, schema discovery, and get queries.""" def test_version(self) -> None: """Verify the --version flag works without a server.""" @@ -76,10 +76,6 @@ def test_version(self) -> None: assert result.exit_code == 0 assert "infrahub v" in result.stdout - # ------------------------------------------------------------------ - # infrahub schema (US5) - # ------------------------------------------------------------------ - def test_schema_list(self, base_dataset: None) -> None: """List schema kinds and verify TestingPerson is present.""" result = runner.invoke(app, ["schema", "list"]) @@ -102,10 +98,6 @@ def test_schema_show(self, base_dataset: None) -> None: assert "height" in result.stdout assert "animals" in result.stdout - # ------------------------------------------------------------------ - # infrahub get (US1) - # 
------------------------------------------------------------------ - def test_get_list_table(self, base_dataset: None) -> None: """Query all persons and verify table output contains known names.""" result = runner.invoke(app, ["get", "TestingPerson"]) @@ -177,11 +169,11 @@ def test_get_invalid_kind(self, base_dataset: None) -> None: result = runner.invoke(app, ["get", "NonExistentKind"]) assert result.exit_code != 0 - # ------------------------------------------------------------------ - # infrahub create (US2) - # ------------------------------------------------------------------ - async def test_create_inline(self, base_dataset: None, client: InfrahubClient) -> None: +class TestEnduserCliWrite(_EnduserCliBase): + """Write CLI tests: create, update, delete operations.""" + + def test_create_inline(self, base_dataset: None) -> None: """Create a person using inline --set flags.""" result = runner.invoke( app, @@ -189,9 +181,9 @@ async def test_create_inline(self, base_dataset: None, client: InfrahubClient) - ) assert result.exit_code == 0 assert "Created" in result.stdout - assert "Integration Test Person" in result.stdout - # Verify via SDK + async def test_create_inline_verify(self, base_dataset: None, client: InfrahubClient) -> None: + """Verify the object created by test_create_inline exists.""" node = await client.get(kind="TestingPerson", id="Integration Test Person") assert node.name.value == "Integration Test Person" # type: ignore[union-attr] assert node.height.value == 190 # type: ignore[union-attr] @@ -201,11 +193,7 @@ def test_create_missing_args(self, base_dataset: None) -> None: result = runner.invoke(app, ["create", "TestingPerson"]) assert result.exit_code != 0 - # ------------------------------------------------------------------ - # infrahub update (US3) - # ------------------------------------------------------------------ - - async def test_update_inline(self, base_dataset: None, client: InfrahubClient) -> None: + def test_update_inline(self, 
base_dataset: None) -> None: """Update a person's height using --set.""" result = runner.invoke( app, @@ -214,24 +202,23 @@ async def test_update_inline(self, base_dataset: None, client: InfrahubClient) - assert result.exit_code == 0 assert "Updated" in result.stdout - # Verify via SDK + async def test_update_inline_verify(self, base_dataset: None, client: InfrahubClient) -> None: + """Verify the update from test_update_inline persisted.""" node = await client.get(kind="TestingPerson", id="Sophia Walker") assert node.height.value == 175 # type: ignore[union-attr] - # ------------------------------------------------------------------ - # infrahub delete (US4) - # ------------------------------------------------------------------ - - async def test_delete_with_yes(self, base_dataset: None, client: InfrahubClient) -> None: - """Delete a person using --yes to skip confirmation.""" - # Create a throwaway object + async def test_delete_setup(self, base_dataset: None, client: InfrahubClient) -> None: + """Create a throwaway object for the delete test.""" obj = await client.create(kind="TestingPerson", name="Delete Me", height=100) await obj.save() + def test_delete_with_yes(self, base_dataset: None) -> None: + """Delete a person using --yes to skip confirmation.""" result = runner.invoke(app, ["delete", "TestingPerson", "Delete Me", "--yes"]) assert result.exit_code == 0 assert "Deleted" in result.stdout - # Verify deleted + async def test_delete_verify(self, base_dataset: None, client: InfrahubClient) -> None: + """Verify the object from test_delete_with_yes is gone.""" node = await client.get(kind="TestingPerson", id="Delete Me", raise_when_missing=False) assert node is None From 94e378058c2770c343ec741dfcb54b879cacce9b Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 13:54:50 +0000 Subject: [PATCH 03/32] test: increase unit test coverage for end-user CLI - Add 47 new tests (134 total, up from 87) - schema.py: test list/show with mocked client, filter, 
branch, empty schema - create.py: test --set inline, --file, invalid field, multiple args, branch - update.py: test --set attributes/relationships, --file, branch - delete.py: test --yes, -y, branch, confirmation abort/accept - formatters/__init__.py: test detect_output_format TTY/non-TTY, get_formatter - yaml/csv/table: test many-cardinality rels, empty peers, None values --- tests/unit/ctl/commands/test_create.py | 169 ++++++++++++++ tests/unit/ctl/commands/test_delete.py | 112 +++++++++ tests/unit/ctl/commands/test_schema.py | 300 +++++++++++++++++++++++++ tests/unit/ctl/commands/test_update.py | 207 +++++++++++++++++ tests/unit/ctl/formatters/test_init.py | 55 +++++ tests/unit/ctl/formatters/test_yaml.py | 138 +++++++++++- 6 files changed, 980 insertions(+), 1 deletion(-) create mode 100644 tests/unit/ctl/formatters/test_init.py diff --git a/tests/unit/ctl/commands/test_create.py b/tests/unit/ctl/commands/test_create.py index 9013864a..83d937bf 100644 --- a/tests/unit/ctl/commands/test_create.py +++ b/tests/unit/ctl/commands/test_create.py @@ -2,6 +2,9 @@ from __future__ import annotations +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest from typer.testing import CliRunner from infrahub_sdk.ctl.enduser_cli import app @@ -26,3 +29,169 @@ def test_create_no_args() -> None: """Omitting both --set and --file exits with a non-zero code.""" result = runner.invoke(app, ["create", "InfraDevice"]) assert result.exit_code != 0 + + +def test_create_with_set_args() -> None: + """``create`` with --set creates a node and prints a confirmation.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = ["site"] + + mock_node = MagicMock() + mock_node.id = "test-id-001" + mock_node.display_label = "router1" + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.create = 
AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["create", "InfraDevice", "--set", "name=router1"]) + + assert result.exit_code == 0, result.stdout + assert "Created" in result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) + mock_client.create.assert_awaited_once_with(kind="InfraDevice", data={"name": "router1"}, branch=None) + mock_node.save.assert_awaited_once_with(allow_upsert=True) + + +def test_create_with_set_args_and_branch() -> None: + """``create`` forwards --branch to client calls.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name"] + mock_schema.relationship_names = [] + + mock_node = MagicMock() + mock_node.id = "test-id-002" + mock_node.display_label = "router2" + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.create = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = runner.invoke( + app, + ["create", "InfraDevice", "--set", "name=router2", "--branch", "dev"], + ) + + assert result.exit_code == 0, result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch="dev") + mock_client.create.assert_awaited_once_with(kind="InfraDevice", data={"name": "router2"}, branch="dev") + + +def test_create_with_file() -> None: + """``create`` with --file delegates to ObjectFile and prints a confirmation.""" + mock_file = MagicMock() + mock_file.spec.data = [{"name": "router-a"}, {"name": "router-b"}] + mock_file.spec.kind = "InfraDevice" + mock_file.validate_format = AsyncMock() + mock_file.process = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client), + patch( + 
"infrahub_sdk.ctl.commands.create.ObjectFile.load_from_disk", + return_value=[mock_file], + ), + ): + result = runner.invoke(app, ["create", "InfraDevice", "--file", "devices.yml"]) + + assert result.exit_code == 0, result.stdout + assert "Created" in result.stdout + assert "2" in result.stdout + assert "InfraDevice" in result.stdout + mock_file.validate_format.assert_awaited_once_with(client=mock_client, branch=None) + mock_file.process.assert_awaited_once_with(client=mock_client, branch=None) + + +def test_create_with_file_multiple_files() -> None: + """``create`` with --file processes every file returned by load_from_disk.""" + + def make_obj_file(kind: str, count: int) -> MagicMock: + obj = MagicMock() + obj.spec.data = [{"name": f"item-{i}"} for i in range(count)] + obj.spec.kind = kind + obj.validate_format = AsyncMock() + obj.process = AsyncMock() + return obj + + file_a = make_obj_file("InfraDevice", 2) + file_b = make_obj_file("InfraPrefix", 3) + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.create.ObjectFile.load_from_disk", + return_value=[file_a, file_b], + ), + ): + result = runner.invoke(app, ["create", "InfraDevice", "--file", "multi.yml"]) + + assert result.exit_code == 0, result.stdout + file_a.validate_format.assert_awaited_once() + file_a.process.assert_awaited_once() + file_b.validate_format.assert_awaited_once() + file_b.process.assert_awaited_once() + + +def test_create_invalid_field() -> None: + """Using --set with an unknown field name exits with a non-zero code.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = ["site"] + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = 
runner.invoke(app, ["create", "InfraDevice", "--set", "nonexistent_field=value"]) + + assert result.exit_code != 0 + + +def test_create_multiple_set_args() -> None: + """``create`` accepts multiple --set options and passes all fields to the client.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = [] + + mock_node = MagicMock() + mock_node.id = "test-id-003" + mock_node.display_label = "router3" + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.create = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = runner.invoke( + app, + ["create", "InfraDevice", "--set", "name=router3", "--set", "description=core router"], + ) + + assert result.exit_code == 0, result.stdout + _, call_kwargs = mock_client.create.call_args + assert call_kwargs["data"] == {"name": "router3", "description": "core router"} + + +@pytest.mark.parametrize("bad_arg", ["noequals", "=emptykey"]) +def test_create_malformed_set_arg(bad_arg: str) -> None: + """Malformed --set arguments (no ``=`` or empty key) exit with a non-zero code.""" + mock_client = MagicMock() + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["create", "InfraDevice", "--set", bad_arg]) + + assert result.exit_code != 0 diff --git a/tests/unit/ctl/commands/test_delete.py b/tests/unit/ctl/commands/test_delete.py index ca257f23..4f5c5d0f 100644 --- a/tests/unit/ctl/commands/test_delete.py +++ b/tests/unit/ctl/commands/test_delete.py @@ -2,6 +2,8 @@ from __future__ import annotations +from unittest.mock import AsyncMock, MagicMock, patch + from typer.testing import CliRunner from infrahub_sdk.ctl.enduser_cli import app @@ -14,3 +16,113 @@ def test_delete_help() -> None: result = 
runner.invoke(app, ["delete", "--help"]) assert result.exit_code == 0 assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +def test_delete_with_yes() -> None: + """``delete --yes`` skips confirmation, deletes the node, and prints a confirmation.""" + mock_node = MagicMock() + mock_node.id = "node-del-001" + mock_node.display_label = "router-to-delete" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["delete", "InfraDevice", "node-del-001", "--yes"]) + + assert result.exit_code == 0, result.stdout + assert "Deleted" in result.stdout + mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="node-del-001") + mock_node.delete.assert_awaited_once() + + +def test_delete_with_yes_short_flag() -> None: + """``delete -y`` is equivalent to ``--yes``.""" + mock_node = MagicMock() + mock_node.id = "node-del-002" + mock_node.display_label = "router-b" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["delete", "InfraDevice", "node-del-002", "-y"]) + + assert result.exit_code == 0, result.stdout + mock_node.delete.assert_awaited_once() + + +def test_delete_with_branch() -> None: + """``delete`` forwards --branch to initialize_client.""" + mock_node = MagicMock() + mock_node.id = "node-br-del" + mock_node.display_label = "device-in-branch" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client) as mock_init: + result = runner.invoke( + app, + ["delete", "InfraDevice", "node-br-del", "--yes", "--branch", "my-branch"], 
+ ) + + assert result.exit_code == 0, result.stdout + mock_init.assert_called_once_with(branch="my-branch") + mock_node.delete.assert_awaited_once() + + +def test_delete_confirmation_abort() -> None: + """Answering ``n`` at the confirmation prompt aborts deletion without calling delete.""" + mock_node = MagicMock() + mock_node.id = "node-abort" + mock_node.display_label = "router-keep" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["delete", "InfraDevice", "node-abort"], input="n\n") + + assert result.exit_code != 0 + mock_node.delete.assert_not_awaited() + + +def test_delete_confirmation_yes_input() -> None: + """Answering ``y`` at the confirmation prompt proceeds with deletion.""" + mock_node = MagicMock() + mock_node.id = "node-confirm" + mock_node.display_label = "router-confirm" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["delete", "InfraDevice", "node-confirm"], input="y\n") + + assert result.exit_code == 0, result.stdout + assert "Deleted" in result.stdout + mock_node.delete.assert_awaited_once() + + +def test_delete_output_contains_id_and_label() -> None: + """Deletion confirmation message includes the node ID and display label.""" + mock_node = MagicMock() + mock_node.id = "unique-id-xyz" + mock_node.display_label = "specific-router" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["delete", "InfraDevice", "unique-id-xyz", "--yes"]) + + assert result.exit_code == 0, 
result.stdout + assert "unique-id-xyz" in result.stdout + assert "specific-router" in result.stdout diff --git a/tests/unit/ctl/commands/test_schema.py b/tests/unit/ctl/commands/test_schema.py index 51c28408..4303d57a 100644 --- a/tests/unit/ctl/commands/test_schema.py +++ b/tests/unit/ctl/commands/test_schema.py @@ -2,13 +2,89 @@ from __future__ import annotations +from unittest.mock import AsyncMock, MagicMock, patch + from typer.testing import CliRunner from infrahub_sdk.ctl.enduser_cli import app +from infrahub_sdk.schema import NodeSchemaAPI runner = CliRunner() +def _make_node_schema(kind: str, namespace: str, name: str, description: str = "") -> MagicMock: + """Build a MagicMock that satisfies ``isinstance(obj, NodeSchemaAPI)`` checks. + + Args: + kind: Full kind string, e.g. ``"InfraDevice"``. + namespace: Namespace portion, e.g. ``"Infra"``. + name: Name portion, e.g. ``"Device"``. + description: Optional human-readable description. + + Returns: + A MagicMock with spec=NodeSchemaAPI and the given property values. + """ + schema = MagicMock(spec=NodeSchemaAPI) + schema.kind = kind + schema.namespace = namespace + schema.name = name + schema.description = description + return schema + + +def _make_attr( + name: str, + kind: str = "Text", + optional: bool = True, + default_value: object = None, + description: str = "", +) -> MagicMock: + """Build a mock attribute object for use in schema_show tests. + + Args: + name: Attribute name. + kind: Attribute type/kind string. + optional: Whether the attribute is optional. + default_value: Default value for the attribute. + description: Optional description. + + Returns: + A plain MagicMock with the given property values. 
+ """ + attr = MagicMock() + attr.name = name + attr.kind = kind + attr.optional = optional + attr.default_value = default_value + attr.description = description + return attr + + +def _make_rel(name: str, peer: str, cardinality: str = "one", optional: bool = True) -> MagicMock: + """Build a mock relationship object for use in schema_show tests. + + Args: + name: Relationship name. + peer: Peer kind string. + cardinality: ``"one"`` or ``"many"``. + optional: Whether the relationship is optional. + + Returns: + A plain MagicMock with the given property values. + """ + rel = MagicMock() + rel.name = name + rel.peer = peer + rel.cardinality = cardinality + rel.optional = optional + return rel + + +# --------------------------------------------------------------------------- +# Help tests +# --------------------------------------------------------------------------- + + def test_schema_list_help() -> None: """``schema list --help`` exits cleanly and includes usage text.""" result = runner.invoke(app, ["schema", "list", "--help"]) @@ -21,3 +97,227 @@ def test_schema_show_help() -> None: result = runner.invoke(app, ["schema", "show", "--help"]) assert result.exit_code == 0 assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +# --------------------------------------------------------------------------- +# schema list tests +# --------------------------------------------------------------------------- + + +def test_schema_list_returns_table() -> None: + """``schema list`` renders a table containing the returned kind names.""" + device_schema = _make_node_schema("InfraDevice", "Infra", "Device", "A network device") + interface_schema = _make_node_schema("InfraInterface", "Infra", "Interface", "A network interface") + + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={"InfraDevice": device_schema, "InfraInterface": interface_schema}) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): 
+ result = runner.invoke(app, ["schema", "list"]) + + assert result.exit_code == 0, result.stdout + assert "InfraDevice" in result.stdout + assert "InfraInterface" in result.stdout + mock_client.schema.all.assert_awaited_once_with(branch=None) + + +def test_schema_list_with_filter() -> None: + """``schema list --filter`` restricts output to kinds matching the substring.""" + device_schema = _make_node_schema("InfraDevice", "Infra", "Device") + prefix_schema = _make_node_schema("IpamPrefix", "Ipam", "Prefix") + + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={"InfraDevice": device_schema, "IpamPrefix": prefix_schema}) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list", "--filter", "infra"]) + + assert result.exit_code == 0, result.stdout + assert "InfraDevice" in result.stdout + assert "IpamPrefix" not in result.stdout + + +def test_schema_list_empty() -> None: + """``schema list`` exits cleanly when no schemas are returned.""" + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={}) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list"]) + + assert result.exit_code == 0, result.stdout + + +def test_schema_list_with_branch() -> None: + """``schema list --branch`` passes the branch name through to the client.""" + schema = _make_node_schema("CoreAccount", "Core", "Account") + + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={"CoreAccount": schema}) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list", "--branch", "feature-x"]) + + assert result.exit_code == 0, result.stdout + mock_client.schema.all.assert_awaited_once_with(branch="feature-x") + + +def test_schema_list_skips_non_node_schema_entries() -> None: + 
"""``schema list`` silently skips entries that are not NodeSchemaAPI instances.""" + node_schema = _make_node_schema("InfraDevice", "Infra", "Device") + # A plain MagicMock without spec=NodeSchemaAPI will fail isinstance(x, NodeSchemaAPI) + generic_schema = MagicMock() + generic_schema.kind = "SomeGenericKind" + + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={"InfraDevice": node_schema, "SomeGenericKind": generic_schema}) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list"]) + + assert result.exit_code == 0, result.stdout + assert "InfraDevice" in result.stdout + assert "SomeGenericKind" not in result.stdout + + +# --------------------------------------------------------------------------- +# schema show tests +# --------------------------------------------------------------------------- + + +def _make_full_schema( + kind: str = "InfraDevice", + namespace: str = "Infra", + description: str = "A network device", + display_labels: list[str] | None = None, + human_friendly_id: list[str] | None = None, + attributes: list[MagicMock] | None = None, + relationships: list[MagicMock] | None = None, +) -> MagicMock: + """Build a detailed schema mock suitable for schema_show. + + Args: + kind: Full kind string. + namespace: Namespace portion. + description: Human-readable description. + display_labels: List of display label expressions. + human_friendly_id: List of human-friendly ID expressions. + attributes: List of attribute mocks. + relationships: List of relationship mocks. + + Returns: + A MagicMock configured with all schema_show-required fields. 
+ """ + schema = MagicMock() + schema.kind = kind + schema.namespace = namespace + schema.description = description + schema.display_labels = display_labels + schema.human_friendly_id = human_friendly_id + schema.attributes = attributes or [] + schema.relationships = relationships or [] + return schema + + +def test_schema_show_displays_metadata() -> None: + """``schema show`` prints kind, description and namespace.""" + schema = _make_full_schema() + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "InfraDevice" in result.stdout + assert "A network device" in result.stdout + assert "Infra" in result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) + + +def test_schema_show_displays_attributes() -> None: + """``schema show`` renders the Attributes table with all column values.""" + attrs = [ + _make_attr("hostname", kind="Text", optional=False, default_value=None, description="Device hostname"), + _make_attr("role", kind="Text", optional=True, default_value="router", description="Device role"), + ] + schema = _make_full_schema(attributes=attrs) + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "Attributes" in result.stdout + assert "hostname" in result.stdout + assert "role" in result.stdout + # Required attribute should show "Yes", optional should show "No" + assert "Yes" in result.stdout + assert "No" in result.stdout + assert "router" in result.stdout + assert "Device hostname" in result.stdout + + +def 
test_schema_show_displays_relationships() -> None: + """``schema show`` renders the Relationships table with peer and cardinality.""" + rels = [ + _make_rel("interfaces", peer="InfraInterface", cardinality="many", optional=True), + _make_rel("site", peer="LocationSite", cardinality="one", optional=False), + ] + schema = _make_full_schema(relationships=rels) + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "Relationships" in result.stdout + assert "interfaces" in result.stdout + assert "InfraInterface" in result.stdout + assert "many" in result.stdout + assert "site" in result.stdout + assert "LocationSite" in result.stdout + assert "one" in result.stdout + + +def test_schema_show_no_attributes_or_relationships() -> None: + """``schema show`` exits cleanly for a schema with no attributes or relationships.""" + schema = _make_full_schema(attributes=[], relationships=[]) + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "Attributes" not in result.stdout + assert "Relationships" not in result.stdout + + +def test_schema_show_with_branch() -> None: + """``schema show --branch`` passes the branch name through to the client.""" + schema = _make_full_schema() + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice", "--branch", "feature-x"]) + + assert result.exit_code == 0, result.stdout 
+ mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch="feature-x") + + +def test_schema_show_attribute_with_default_value() -> None: + """``schema show`` displays the default value when set on an attribute.""" + attrs = [_make_attr("speed", kind="Number", optional=True, default_value=1000)] + schema = _make_full_schema(attributes=attrs) + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "1000" in result.stdout diff --git a/tests/unit/ctl/commands/test_update.py b/tests/unit/ctl/commands/test_update.py index 0480f265..d19aaf0d 100644 --- a/tests/unit/ctl/commands/test_update.py +++ b/tests/unit/ctl/commands/test_update.py @@ -2,6 +2,9 @@ from __future__ import annotations +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest from typer.testing import CliRunner from infrahub_sdk.ctl.enduser_cli import app @@ -29,3 +32,207 @@ def test_update_no_args() -> None: """Omitting both --set and --file exits with a non-zero code.""" result = runner.invoke(app, ["update", "InfraDevice", "abc-123"]) assert result.exit_code != 0 + + +def test_update_with_set_args() -> None: + """``update`` with --set fetches the node, applies the change, and saves it.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = [] + + mock_attr = MagicMock() + mock_attr.value = "old-name" + + mock_node = MagicMock() + mock_node.id = "abc-123" + mock_node.display_label = "router1" + mock_node.name = mock_attr + mock_node.save = AsyncMock() + + def getattr_side_effect(obj: object, name: str) -> MagicMock: + if name == "name": + return mock_attr + return MagicMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + 
mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.get = AsyncMock(return_value=mock_node) + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.update.getattr", side_effect=getattr_side_effect, create=True), + ): + result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--set", "name=router1"]) + + assert result.exit_code == 0, result.stdout + assert "Updated" in result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) + mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="abc-123") + mock_node.save.assert_awaited_once() + + +def test_update_with_set_args_attribute_applied() -> None: + """``update`` with an attribute --set updates the attribute value on the node.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["description"] + mock_schema.relationship_names = [] + + mock_attr = MagicMock() + mock_attr.value = "old description" + + mock_node = MagicMock() + mock_node.id = "node-001" + mock_node.display_label = "device-a" + mock_node.save = AsyncMock() + + # Make getattr(node, "description") return mock_attr + type(mock_node).description = mock_attr + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["update", "InfraDevice", "node-001", "--set", "description=new description"]) + + assert result.exit_code == 0, result.stdout + assert "Updated" in result.stdout + mock_node.save.assert_awaited_once() + + +def test_update_with_set_args_and_branch() -> None: + """``update`` forwards --branch to schema and get calls.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name"] + mock_schema.relationship_names = [] + + mock_attr = 
MagicMock() + mock_attr.value = "old" + + mock_node = MagicMock() + mock_node.id = "node-br" + mock_node.display_label = "device-br" + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + result = runner.invoke( + app, + ["update", "InfraDevice", "node-br", "--set", "name=newname", "--branch", "feature-x"], + ) + + assert result.exit_code == 0, result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch="feature-x") + mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="node-br") + + +def test_update_invalid_field() -> None: + """Using --set with an unknown field name exits with a non-zero code.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name"] + mock_schema.relationship_names = [] + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--set", "unknown_field=value"]) + + assert result.exit_code != 0 + + +def test_update_with_file() -> None: + """``update`` with --file delegates to ObjectFile and prints a confirmation.""" + mock_file = MagicMock() + mock_file.validate_format = AsyncMock() + mock_file.process = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.ObjectFile.load_from_disk", + return_value=[mock_file], + ), + ): + result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--file", "updates.yml"]) + + assert result.exit_code == 0, result.stdout + assert "Processed" 
in result.stdout or "successfully" in result.stdout.lower() + mock_file.validate_format.assert_awaited_once_with(client=mock_client, branch=None) + mock_file.process.assert_awaited_once_with(client=mock_client, branch=None) + + +def test_update_with_file_and_branch() -> None: + """``update`` with --file forwards --branch to validate_format and process.""" + mock_file = MagicMock() + mock_file.validate_format = AsyncMock() + mock_file.process = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.ObjectFile.load_from_disk", + return_value=[mock_file], + ), + ): + result = runner.invoke( + app, + ["update", "InfraDevice", "abc-123", "--file", "updates.yml", "--branch", "staging"], + ) + + assert result.exit_code == 0, result.stdout + mock_file.validate_format.assert_awaited_once_with(client=mock_client, branch="staging") + mock_file.process.assert_awaited_once_with(client=mock_client, branch="staging") + + +def test_update_with_set_args_relationship() -> None: + """``update`` with a relationship --set field fetches the rel and sets the new id.""" + mock_schema = MagicMock() + mock_schema.attribute_names = [] + mock_schema.relationship_names = ["site"] + + mock_rel = MagicMock() + mock_rel.id = "old-site-id" + mock_rel.display_label = "old-site" + mock_rel.fetch = AsyncMock() + + mock_node = MagicMock() + mock_node.id = "node-rel-001" + mock_node.display_label = "device-rel" + mock_node.site = mock_rel + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.get = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["update", "InfraDevice", "node-rel-001", "--set", "site=new-site-id"]) + + assert result.exit_code == 
0, result.stdout + assert "Updated" in result.stdout + mock_rel.fetch.assert_awaited_once() + mock_node.save.assert_awaited_once() + + +@pytest.mark.parametrize("bad_arg", ["noequals", "=emptykey"]) +def test_update_malformed_set_arg(bad_arg: str) -> None: + """Malformed --set arguments (no ``=`` or empty key) exit with a non-zero code.""" + mock_client = MagicMock() + + with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--set", bad_arg]) + + assert result.exit_code != 0 diff --git a/tests/unit/ctl/formatters/test_init.py b/tests/unit/ctl/formatters/test_init.py new file mode 100644 index 00000000..d822ab8d --- /dev/null +++ b/tests/unit/ctl/formatters/test_init.py @@ -0,0 +1,55 @@ +"""Tests for infrahub_sdk.ctl.formatters package init (OutputFormat, detect/get).""" + +from __future__ import annotations + +from unittest.mock import patch + +import pytest + +from infrahub_sdk.ctl.formatters import ( + CsvFormatter, + JsonFormatter, + OutputFormat, + TableFormatter, + YamlFormatter, + detect_output_format, + get_formatter, +) + + +class TestOutputFormat: + def test_enum_values(self) -> None: + assert OutputFormat.TABLE == "table" + assert OutputFormat.JSON == "json" + assert OutputFormat.CSV == "csv" + assert OutputFormat.YAML == "yaml" + + +class TestDetectOutputFormat: + def test_returns_table_when_tty(self) -> None: + with patch("sys.stdout") as mock_stdout: + mock_stdout.isatty.return_value = True + assert detect_output_format() == OutputFormat.TABLE + + def test_returns_json_when_not_tty(self) -> None: + with patch("sys.stdout") as mock_stdout: + mock_stdout.isatty.return_value = False + assert detect_output_format() == OutputFormat.JSON + + +class TestGetFormatter: + def test_returns_table_formatter(self) -> None: + assert isinstance(get_formatter(OutputFormat.TABLE), TableFormatter) + + def test_returns_json_formatter(self) -> None: + assert 
isinstance(get_formatter(OutputFormat.JSON), JsonFormatter) + + def test_returns_csv_formatter(self) -> None: + assert isinstance(get_formatter(OutputFormat.CSV), CsvFormatter) + + def test_returns_yaml_formatter(self) -> None: + assert isinstance(get_formatter(OutputFormat.YAML), YamlFormatter) + + def test_raises_for_invalid_format(self) -> None: + with pytest.raises(ValueError, match="Unsupported output format"): + get_formatter("invalid") # type: ignore[arg-type] diff --git a/tests/unit/ctl/formatters/test_yaml.py b/tests/unit/ctl/formatters/test_yaml.py index d64b34af..25a7525b 100644 --- a/tests/unit/ctl/formatters/test_yaml.py +++ b/tests/unit/ctl/formatters/test_yaml.py @@ -2,7 +2,7 @@ from __future__ import annotations -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch import yaml # pyright: ignore[reportMissingModuleSource] @@ -204,3 +204,139 @@ def test_format_detail_relationship_uses_display_label(self) -> None: parsed = yaml.safe_load(result) assert parsed["spec"]["data"][0]["site"] == "DC1" + + +class TestYamlFormatterEdgeCases: + """Edge case tests targeting uncovered branches in YamlFormatter._node_to_data_entry.""" + + def test_attr_detail_not_dict_uses_raw_value(self) -> None: + """Test that a non-dict attr_detail is used as the raw entry value. + + Covers the ``else`` branch in _node_to_data_entry for attributes when + detail.get(attr_name) returns something that is not a dict. 
+ """ + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + fake_detail = { + "id": "test-id", + "display_label": "Test", + "kind": "TestKind", + "name": "raw-string-value", # not a dict + } + with patch( + "infrahub_sdk.ctl.formatters.yaml.extract_node_detail", + return_value=fake_detail, + ): + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["name"] == "raw-string-value" + + def test_rel_detail_not_dict_uses_raw_value(self) -> None: + """Test that a non-dict rel_detail is used as the raw entry value. + + Covers the ``not isinstance(rel_detail, dict)`` branch for relationships. + """ + schema = _make_mock_schema([], ["site"]) + node = _make_mock_node({}, {"site": "DC1"}) + formatter = YamlFormatter() + + fake_detail = { + "id": "test-id", + "display_label": "Test", + "kind": "TestKind", + "site": "non-dict-rel-value", # not a dict + } + with patch( + "infrahub_sdk.ctl.formatters.yaml.extract_node_detail", + return_value=fake_detail, + ): + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["site"] == "non-dict-rel-value" + + def test_rel_cardinality_one_with_empty_display_label(self) -> None: + """Test cardinality-one relationship with an empty display_label. + + Covers the ``cardinality == "one"`` branch where display_label is "". + """ + schema = _make_mock_schema([], ["site"]) + node = _make_mock_node({}, {}) + # Attach a relationship with empty display_label using configure_mock + # to avoid setattr with a constant string literal. 
+ rel = MagicMock() + rel.display_label = "" + rel.id = "site-id" + node.configure_mock(site=rel) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert not parsed["spec"]["data"][0]["site"] + + def test_rel_cardinality_many_with_empty_peers(self) -> None: + """Test cardinality-many relationship with an empty peers list. + + Covers the ``peers`` empty branch producing ``{"data": []}``. + """ + schema = MagicMock() + schema.kind = "TestKind" + schema.attribute_names = [] + schema.relationship_names = ["tags"] + + def get_rel_side_effect(name: str) -> MagicMock: + rel = MagicMock() + rel.cardinality = "many" + return rel + + schema.get_relationship = MagicMock(side_effect=get_rel_side_effect) + + node = MagicMock() + node.id = "test-id" + node.display_label = "Test" + rel_manager = MagicMock() + rel_manager.peers = [] + node.configure_mock(tags=rel_manager) + + formatter = YamlFormatter() + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["tags"] == {"data": []} + + def test_rel_cardinality_many_with_peers(self) -> None: + """Test cardinality-many relationship with populated peers. + + Covers the ``peers`` non-empty branch producing ``{"data": [...]}``. 
+ """ + schema = MagicMock() + schema.kind = "TestKind" + schema.attribute_names = [] + schema.relationship_names = ["tags"] + + def get_rel_side_effect(name: str) -> MagicMock: + rel = MagicMock() + rel.cardinality = "many" + return rel + + schema.get_relationship = MagicMock(side_effect=get_rel_side_effect) + + node = MagicMock() + node.id = "test-id" + node.display_label = "Test" + rel_manager = MagicMock() + rel_manager.peers = [ + MagicMock(display_label="peer1", id="id1"), + MagicMock(display_label="peer2", id="id2"), + ] + node.configure_mock(tags=rel_manager) + + formatter = YamlFormatter() + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["tags"] == {"data": ["peer1", "peer2"]} From e71acb111dcce59f478177a48676631fd450f5cc Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 14:04:43 +0000 Subject: [PATCH 04/32] fix: resolve remaining CI failures - Fix MD032 blanks-around-lists in cli-commands.md and data-model.md - Revert SDK ref docs to match stable (CI regenerates with its own mdxify version) --- .../sdk_ref/infrahub_sdk/client.mdx | 216 +++++++++--------- .../sdk_ref/infrahub_sdk/node/attribute.mdx | 8 +- .../sdk_ref/infrahub_sdk/node/metadata.mdx | 4 +- .../sdk_ref/infrahub_sdk/node/node.mdx | 110 ++++----- .../sdk_ref/infrahub_sdk/node/parsers.mdx | 2 +- .../sdk_ref/infrahub_sdk/node/property.mdx | 2 +- .../infrahub_sdk/node/related_node.mdx | 38 +-- .../infrahub_sdk/node/relationship.mdx | 32 +-- .../contracts/cli-commands.md | 3 + specs/001-end-user-cli/data-model.md | 4 + 10 files changed, 213 insertions(+), 206 deletions(-) diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx index 0b91cc46..7b47c99c 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx @@ -7,13 +7,13 @@ sidebarTitle: client ## Classes -### `InfrahubClient` 
+### `InfrahubClient` GraphQL Client to interact with Infrahub. **Methods:** -#### `get` +#### `get` ```python get(self, kind: type[SchemaType], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType | None @@ -22,50 +22,50 @@ get(self, kind: type[SchemaType], raise_when_missing: Literal[False], at: Timest
Show 6 other overloads -#### `get` +#### `get` ```python get(self, kind: type[SchemaType], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType ``` -#### `get` +#### `get` ```python get(self, kind: type[SchemaType], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode | None ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: bool = ..., at: Timestamp 
| None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode ``` -#### `get` +#### `get` ```python get(self, kind: str | type[SchemaType], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNode | SchemaType | None ```
-#### `delete` +#### `delete` ```python delete(self, kind: str | type[SchemaType], id: str, branch: str | None = None) -> None ``` -#### `create` +#### `create` ```python create(self, kind: str | type[SchemaType], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNode | SchemaType @@ -74,20 +74,20 @@ create(self, kind: str | type[SchemaType], data: dict | None = None, branch: str
Show 2 other overloads -#### `create` +#### `create` ```python create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNode ``` -#### `create` +#### `create` ```python create(self, kind: type[SchemaType], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaType ```
-#### `get_version` +#### `get_version` ```python get_version(self) -> str @@ -95,7 +95,7 @@ get_version(self) -> str Return the Infrahub version. -#### `get_user` +#### `get_user` ```python get_user(self) -> dict @@ -103,7 +103,7 @@ get_user(self) -> dict Return user information -#### `get_user_permissions` +#### `get_user_permissions` ```python get_user_permissions(self) -> dict @@ -111,7 +111,7 @@ get_user_permissions(self) -> dict Return user permissions -#### `count` +#### `count` ```python count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int @@ -119,7 +119,7 @@ count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: s Return the number of nodes of a given kind. -#### `all` +#### `all` ```python all(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaType] @@ -128,13 +128,13 @@ all(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None
Show 2 other overloads -#### `all` +#### `all` ```python all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNode] ``` -#### `all` +#### `all` ```python all(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNode] | list[SchemaType] @@ -164,7 +164,7 @@ Retrieve all nodes of a given kind - list\[InfrahubNode]: List of Nodes
-#### `filters` +#### `filters` ```python filters(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaType] @@ -173,13 +173,13 @@ filters(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str |
Show 2 other overloads -#### `filters` +#### `filters` ```python filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNode] ``` -#### `filters` +#### `filters` ```python filters(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNode] | list[SchemaType] @@ -211,7 +211,7 @@ Retrieve nodes of a given kind based on provided filters. - list\[InfrahubNodeSync]: List of Nodes that match the given filters.
-#### `clone` +#### `clone` ```python clone(self, branch: str | None = None) -> InfrahubClient @@ -219,7 +219,7 @@ clone(self, branch: str | None = None) -> InfrahubClient Return a cloned version of the client using the same configuration -#### `execute_graphql` +#### `execute_graphql` ```python execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict @@ -247,37 +247,37 @@ If retry_on_failure is True, the query will retry until the server becomes reach - The GraphQL data payload (response["data"]). -#### `refresh_login` +#### `refresh_login` ```python refresh_login(self) -> None ``` -#### `login` +#### `login` ```python login(self, refresh: bool = False) -> None ``` -#### `query_gql_query` +#### `query_gql_query` ```python query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict ``` -#### `create_diff` +#### `create_diff` ```python create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str ``` -#### `get_diff_summary` +#### `get_diff_summary` ```python get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff] ``` -#### `get_diff_tree` +#### `get_diff_tree` ```python get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None @@ -287,7 +287,7 @@ Get 
complete diff tree with metadata and nodes. Returns None if no diff exists. -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType @@ -296,37 +296,37 @@ allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType],
Show 6 other overloads -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., 
branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None @@ -350,7 +350,7 @@ Returns: InfrahubNode: Node corresponding to the allocated resource.
-#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType @@ -359,37 +359,37 @@ allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], i
Show 6 other overloads -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., 
prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None @@ -414,25 +414,25 @@ Returns: InfrahubNode: Node corresponding to the allocated resource.
-#### `create_batch` +#### `create_batch` ```python create_batch(self, return_exceptions: bool = False) -> InfrahubBatch ``` -#### `get_list_repositories` +#### `get_list_repositories` ```python get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData] ``` -#### `repository_update_commit` +#### `repository_update_commit` ```python repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool ``` -#### `convert_object_type` +#### `convert_object_type` ```python convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNode @@ -443,11 +443,11 @@ and its values indicate how to fill in these fields. Any mandatory field not hav in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type for more information. -### `InfrahubClientSync` +### `InfrahubClientSync` **Methods:** -#### `get` +#### `get` ```python get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync | None @@ -456,50 +456,50 @@ get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[False], at: Ti
Show 6 other overloads -#### `get` +#### `get` ```python get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync ``` -#### `get` +#### `get` ```python get(self, kind: type[SchemaTypeSync], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync | None ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync ``` -#### `get` +#### `get` ```python get(self, kind: str, raise_when_missing: 
bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync ``` -#### `get` +#### `get` ```python get(self, kind: str | type[SchemaTypeSync], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync | None ```
-#### `delete` +#### `delete` ```python delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = None) -> None ``` -#### `create` +#### `create` ```python create(self, kind: str | type[SchemaTypeSync], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync @@ -508,20 +508,20 @@ create(self, kind: str | type[SchemaTypeSync], data: dict | None = None, branch:
Show 2 other overloads -#### `create` +#### `create` ```python create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNodeSync ``` -#### `create` +#### `create` ```python create(self, kind: type[SchemaTypeSync], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaTypeSync ```
-#### `get_version` +#### `get_version` ```python get_version(self) -> str @@ -529,7 +529,7 @@ get_version(self) -> str Return the Infrahub version. -#### `get_user` +#### `get_user` ```python get_user(self) -> dict @@ -537,7 +537,7 @@ get_user(self) -> dict Return user information -#### `get_user_permissions` +#### `get_user_permissions` ```python get_user_permissions(self) -> dict @@ -545,7 +545,7 @@ get_user_permissions(self) -> dict Return user permissions -#### `clone` +#### `clone` ```python clone(self, branch: str | None = None) -> InfrahubClientSync @@ -553,7 +553,7 @@ clone(self, branch: str | None = None) -> InfrahubClientSync Return a cloned version of the client using the same configuration -#### `execute_graphql` +#### `execute_graphql` ```python execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict @@ -582,7 +582,7 @@ GraphQL errors always raise `GraphQLError`. Defaults to None. - The GraphQL data payload (`response["data"]`). -#### `count` +#### `count` ```python count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int @@ -590,7 +590,7 @@ count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: s Return the number of nodes of a given kind. -#### `all` +#### `all` ```python all(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) 
-> list[SchemaTypeSync] @@ -599,13 +599,13 @@ all(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str |
Show 2 other overloads -#### `all` +#### `all` ```python all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNodeSync] ``` -#### `all` +#### `all` ```python all(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNodeSync] | list[SchemaTypeSync] @@ -635,7 +635,7 @@ Retrieve all nodes of a given kind - list\[InfrahubNodeSync]: List of Nodes
-#### `filters` +#### `filters` ```python filters(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaTypeSync] @@ -644,13 +644,13 @@ filters(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: st
Show 2 other overloads -#### `filters` +#### `filters` ```python filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNodeSync] ``` -#### `filters` +#### `filters` ```python filters(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNodeSync] | list[SchemaTypeSync] @@ -682,7 +682,7 @@ Retrieve nodes of a given kind based on provided filters. - list\[InfrahubNodeSync]: List of Nodes that match the given filters.
-#### `create_batch` +#### `create_batch` ```python create_batch(self, return_exceptions: bool = False) -> InfrahubBatchSync @@ -693,31 +693,31 @@ Create a batch to execute multiple queries concurrently. Executing the batch will be performed using a thread pool, meaning it cannot guarantee the execution order. It is not recommended to use such batch to manipulate objects that depend on each others. -#### `get_list_repositories` +#### `get_list_repositories` ```python get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData] ``` -#### `query_gql_query` +#### `query_gql_query` ```python query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict ``` -#### `create_diff` +#### `create_diff` ```python create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str ``` -#### `get_diff_summary` +#### `get_diff_summary` ```python get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff] ``` -#### `get_diff_tree` +#### `get_diff_tree` ```python get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None @@ -727,7 +727,7 @@ Get complete diff tree with metadata and nodes. Returns None if no diff exists. 
-#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync @@ -736,37 +736,37 @@ allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTyp
Show 6 other overloads -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = 
..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None ``` -#### `allocate_next_ip_address` +#### `allocate_next_ip_address` ```python allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None @@ -790,7 +790,7 @@ Returns: InfrahubNodeSync: Node corresponding to the allocated resource.
-#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync @@ -799,37 +799,37 @@ allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaType
Show 6 other overloads -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, 
kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None ``` -#### `allocate_next_ip_prefix` +#### `allocate_next_ip_prefix` ```python allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None @@ -854,25 +854,25 @@ Returns: InfrahubNodeSync: Node corresponding to the allocated resource.
-#### `repository_update_commit` +#### `repository_update_commit` ```python repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool ``` -#### `refresh_login` +#### `refresh_login` ```python refresh_login(self) -> None ``` -#### `login` +#### `login` ```python login(self, refresh: bool = False) -> None ``` -#### `convert_object_type` +#### `convert_object_type` ```python convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNodeSync @@ -883,39 +883,39 @@ and its values indicate how to fill in these fields. Any mandatory field not hav in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type for more information. -### `ProcessRelationsNode` +### `ProcessRelationsNode` -### `ProxyConfig` +### `ProxyConfig` -### `ProxyConfigSync` +### `ProxyConfigSync` -### `ProcessRelationsNodeSync` +### `ProcessRelationsNodeSync` -### `BaseClient` +### `BaseClient` Base class for InfrahubClient and InfrahubClientSync **Methods:** -#### `request_context` +#### `request_context` ```python request_context(self) -> RequestContext | None ``` -#### `request_context` +#### `request_context` ```python request_context(self, request_context: RequestContext) -> None ``` -#### `start_tracking` +#### `start_tracking` ```python start_tracking(self, identifier: str | None = None, params: dict[str, Any] | None = None, delete_unused_nodes: bool = False, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> Self ``` -#### `set_context_properties` +#### `set_context_properties` ```python set_context_properties(self, identifier: str, params: dict[str, str] | None = None, delete_unused_nodes: bool = True, reset: bool = True, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> None @@ 
-923,19 +923,19 @@ set_context_properties(self, identifier: str, params: dict[str, str] | None = No ## Functions -### `handle_relogin` +### `handle_relogin` ```python handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]) -> Callable[..., Coroutine[Any, Any, httpx.Response]] ``` -### `handle_relogin_sync` +### `handle_relogin_sync` ```python handle_relogin_sync(func: Callable[..., httpx.Response]) -> Callable[..., httpx.Response] ``` -### `raise_for_error_deprecation_warning` +### `raise_for_error_deprecation_warning` ```python raise_for_error_deprecation_warning(value: bool | None) -> None diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx index 1743230f..d08c7fc5 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx @@ -7,25 +7,25 @@ sidebarTitle: attribute ## Classes -### `Attribute` +### `Attribute` Represents an attribute of a Node, including its schema, value, and properties. **Methods:** -#### `value` +#### `value` ```python value(self) -> Any ``` -#### `value` +#### `value` ```python value(self, value: Any) -> None ``` -#### `is_from_pool_attribute` +#### `is_from_pool_attribute` ```python is_from_pool_attribute(self) -> bool diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx index 5306c66f..6175236f 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx @@ -7,10 +7,10 @@ sidebarTitle: metadata ## Classes -### `NodeMetadata` +### `NodeMetadata` Represents metadata about a node (created_at, created_by, updated_at, updated_by). -### `RelationshipMetadata` +### `RelationshipMetadata` Represents metadata about a relationship edge (updated_at, updated_by). 
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx index c3bc30c6..e23120dd 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx @@ -7,37 +7,37 @@ sidebarTitle: node ## Classes -### `InfrahubNode` +### `InfrahubNode` Represents a Infrahub node in an asynchronous context. **Methods:** -#### `from_graphql` +#### `from_graphql` ```python from_graphql(cls, client: InfrahubClient, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self ``` -#### `generate` +#### `generate` ```python generate(self, nodes: list[str] | None = None) -> None ``` -#### `artifact_generate` +#### `artifact_generate` ```python artifact_generate(self, name: str) -> None ``` -#### `artifact_fetch` +#### `artifact_fetch` ```python artifact_fetch(self, name: str) -> str | dict[str, Any] ``` -#### `download_file` +#### `download_file` ```python download_file(self, dest: Path | None = None) -> bytes | int @@ -75,25 +75,25 @@ The node must have been saved (have an id) before calling this method. 
>>> bytes_written = await contract.download_file(dest=Path("/tmp/contract.pdf")) ``` -#### `delete` +#### `delete` ```python delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `save` +#### `save` ```python save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `generate_query_data` +#### `generate_query_data` ```python generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict] ``` -#### `generate_query_data_node` +#### `generate_query_data_node` ```python generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict] @@ -115,31 +115,31 @@ Generate the node part of a GraphQL Query with attributes and nodes. 
- dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format -#### `add_relationships` +#### `add_relationships` ```python add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None ``` -#### `remove_relationships` +#### `remove_relationships` ```python remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None ``` -#### `create` +#### `create` ```python create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `update` +#### `update` ```python update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `get_pool_allocated_resources` +#### `get_pool_allocated_resources` ```python get_pool_allocated_resources(self, resource: InfrahubNode) -> list[InfrahubNode] @@ -155,7 +155,7 @@ Fetch all nodes that were allocated for the pool and a given resource. - list\[InfrahubNode]: The allocated nodes. -#### `get_pool_resources_utilization` +#### `get_pool_resources_utilization` ```python get_pool_resources_utilization(self) -> list[dict[str, Any]] @@ -167,7 +167,7 @@ Fetch the utilization of each resource for the pool. - list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool. -#### `get_flat_value` +#### `get_flat_value` ```python get_flat_value(self, key: str, separator: str = '__') -> Any @@ -180,7 +180,7 @@ Query recursively a value defined in a flat notation (string), on a hierarchy of name__value module.object.value -#### `extract` +#### `extract` ```python extract(self, params: dict[str, str]) -> dict[str, Any] @@ -188,37 +188,37 @@ extract(self, params: dict[str, str]) -> dict[str, Any] Extract some data points defined in a flat notation. -### `InfrahubNodeSync` +### `InfrahubNodeSync` Represents a Infrahub node in a synchronous context. 
**Methods:** -#### `from_graphql` +#### `from_graphql` ```python from_graphql(cls, client: InfrahubClientSync, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self ``` -#### `generate` +#### `generate` ```python generate(self, nodes: list[str] | None = None) -> None ``` -#### `artifact_generate` +#### `artifact_generate` ```python artifact_generate(self, name: str) -> None ``` -#### `artifact_fetch` +#### `artifact_fetch` ```python artifact_fetch(self, name: str) -> str | dict[str, Any] ``` -#### `download_file` +#### `download_file` ```python download_file(self, dest: Path | None = None) -> bytes | int @@ -256,25 +256,25 @@ The node must have been saved (have an id) before calling this method. >>> bytes_written = contract.download_file(dest=Path("/tmp/contract.pdf")) ``` -#### `delete` +#### `delete` ```python delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `save` +#### `save` ```python save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `generate_query_data` +#### `generate_query_data` ```python generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict] ``` -#### `generate_query_data_node` +#### `generate_query_data_node` ```python generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict] @@ -296,31 +296,31 
@@ Generate the node part of a GraphQL Query with attributes and nodes. - dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format -#### `add_relationships` +#### `add_relationships` ```python add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None ``` -#### `remove_relationships` +#### `remove_relationships` ```python remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None ``` -#### `create` +#### `create` ```python create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `update` +#### `update` ```python update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None ``` -#### `get_pool_allocated_resources` +#### `get_pool_allocated_resources` ```python get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[InfrahubNodeSync] @@ -336,7 +336,7 @@ Fetch all nodes that were allocated for the pool and a given resource. - list\[InfrahubNodeSync]: The allocated nodes. -#### `get_pool_resources_utilization` +#### `get_pool_resources_utilization` ```python get_pool_resources_utilization(self) -> list[dict[str, Any]] @@ -348,7 +348,7 @@ Fetch the utilization of each resource for the pool. - list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool. -#### `get_flat_value` +#### `get_flat_value` ```python get_flat_value(self, key: str, separator: str = '__') -> Any @@ -361,7 +361,7 @@ Query recursively a value defined in a flat notation (string), on a hierarchy of name__value module.object.value -#### `extract` +#### `extract` ```python extract(self, params: dict[str, str]) -> dict[str, Any] @@ -369,49 +369,49 @@ extract(self, params: dict[str, str]) -> dict[str, Any] Extract some data points defined in a flat notation. 
-### `InfrahubNodeBase` +### `InfrahubNodeBase` Base class for InfrahubNode and InfrahubNodeSync **Methods:** -#### `get_branch` +#### `get_branch` ```python get_branch(self) -> str ``` -#### `get_path_value` +#### `get_path_value` ```python get_path_value(self, path: str) -> Any ``` -#### `get_human_friendly_id` +#### `get_human_friendly_id` ```python get_human_friendly_id(self) -> list[str] | None ``` -#### `get_human_friendly_id_as_string` +#### `get_human_friendly_id_as_string` ```python get_human_friendly_id_as_string(self, include_kind: bool = False) -> str | None ``` -#### `hfid` +#### `hfid` ```python hfid(self) -> list[str] | None ``` -#### `hfid_str` +#### `hfid_str` ```python hfid_str(self) -> str | None ``` -#### `get_node_metadata` +#### `get_node_metadata` ```python get_node_metadata(self) -> NodeMetadata | None @@ -419,37 +419,37 @@ get_node_metadata(self) -> NodeMetadata | None Returns the node metadata (created_at, created_by, updated_at, updated_by) if fetched. -#### `get_kind` +#### `get_kind` ```python get_kind(self) -> str ``` -#### `get_all_kinds` +#### `get_all_kinds` ```python get_all_kinds(self) -> list[str] ``` -#### `is_ip_prefix` +#### `is_ip_prefix` ```python is_ip_prefix(self) -> bool ``` -#### `is_ip_address` +#### `is_ip_address` ```python is_ip_address(self) -> bool ``` -#### `is_resource_pool` +#### `is_resource_pool` ```python is_resource_pool(self) -> bool ``` -#### `is_file_object` +#### `is_file_object` ```python is_file_object(self) -> bool @@ -457,7 +457,7 @@ is_file_object(self) -> bool Check if this node inherits from CoreFileObject and supports file uploads. -#### `upload_from_path` +#### `upload_from_path` ```python upload_from_path(self, path: Path) -> None @@ -475,7 +475,7 @@ The file will be streamed during upload, avoiding loading the entire file into m - `FeatureNotSupportedError`: If this node doesn't inherit from CoreFileObject. 
-#### `upload_from_bytes` +#### `upload_from_bytes` ```python upload_from_bytes(self, content: bytes | BinaryIO, name: str) -> None @@ -505,7 +505,7 @@ Using BinaryIO is recommended for large content to stream during upload. ... node.upload_from_bytes(content=f, name="file.bin") ``` -#### `clear_file` +#### `clear_file` ```python clear_file(self) -> None @@ -513,13 +513,13 @@ clear_file(self) -> None Clear any pending file content. -#### `get_raw_graphql_data` +#### `get_raw_graphql_data` ```python get_raw_graphql_data(self) -> dict | None ``` -#### `generate_query_data_init` +#### `generate_query_data_init` ```python generate_query_data_init(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, partial_match: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict] diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx index 74d79816..f70c6788 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx @@ -7,7 +7,7 @@ sidebarTitle: parsers ## Functions -### `parse_human_friendly_id` +### `parse_human_friendly_id` ```python parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]] diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx index d4ace16d..a7400483 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx @@ -7,6 +7,6 @@ sidebarTitle: property ## Classes -### `NodeProperty` +### `NodeProperty` Represents a property of a node, typically used for metadata like display labels. 
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx index 8e03ce0f..edc1112c 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx @@ -7,61 +7,61 @@ sidebarTitle: related_node ## Classes -### `RelatedNodeBase` +### `RelatedNodeBase` Base class for representing a related node in a relationship. **Methods:** -#### `id` +#### `id` ```python id(self) -> str | None ``` -#### `hfid` +#### `hfid` ```python hfid(self) -> list[Any] | None ``` -#### `hfid_str` +#### `hfid_str` ```python hfid_str(self) -> str | None ``` -#### `is_resource_pool` +#### `is_resource_pool` ```python is_resource_pool(self) -> bool ``` -#### `initialized` +#### `initialized` ```python initialized(self) -> bool ``` -#### `display_label` +#### `display_label` ```python display_label(self) -> str | None ``` -#### `typename` +#### `typename` ```python typename(self) -> str | None ``` -#### `kind` +#### `kind` ```python kind(self) -> str | None ``` -#### `is_from_profile` +#### `is_from_profile` ```python is_from_profile(self) -> bool @@ -69,7 +69,7 @@ is_from_profile(self) -> bool Return whether this relationship was set from a profile. Done by checking if the source is of a profile kind. -#### `get_relationship_metadata` +#### `get_relationship_metadata` ```python get_relationship_metadata(self) -> RelationshipMetadata | None @@ -77,49 +77,49 @@ get_relationship_metadata(self) -> RelationshipMetadata | None Returns the relationship metadata (updated_at, updated_by) if fetched. -### `RelatedNode` +### `RelatedNode` Represents a RelatedNodeBase in an asynchronous context. 
**Methods:** -#### `fetch` +#### `fetch` ```python fetch(self, timeout: int | None = None) -> None ``` -#### `peer` +#### `peer` ```python peer(self) -> InfrahubNode ``` -#### `get` +#### `get` ```python get(self) -> InfrahubNode ``` -### `RelatedNodeSync` +### `RelatedNodeSync` Represents a related node in a synchronous context. **Methods:** -#### `fetch` +#### `fetch` ```python fetch(self, timeout: int | None = None) -> None ``` -#### `peer` +#### `peer` ```python peer(self) -> InfrahubNodeSync ``` -#### `get` +#### `get` ```python get(self) -> InfrahubNodeSync diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx index fec5df0e..567b7c8d 100644 --- a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx +++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx @@ -7,37 +7,37 @@ sidebarTitle: relationship ## Classes -### `RelationshipManagerBase` +### `RelationshipManagerBase` Base class for RelationshipManager and RelationshipManagerSync **Methods:** -#### `peer_ids` +#### `peer_ids` ```python peer_ids(self) -> list[str] ``` -#### `peer_hfids` +#### `peer_hfids` ```python peer_hfids(self) -> list[list[Any]] ``` -#### `peer_hfids_str` +#### `peer_hfids_str` ```python peer_hfids_str(self) -> list[str] ``` -#### `has_update` +#### `has_update` ```python has_update(self) -> bool ``` -#### `is_from_profile` +#### `is_from_profile` ```python is_from_profile(self) -> bool @@ -45,19 +45,19 @@ is_from_profile(self) -> bool Return whether this relationship was set from a profile. All its peers must be from a profile. -### `RelationshipManager` +### `RelationshipManager` Manages relationships of a node in an asynchronous context. 
**Methods:** -#### `fetch` +#### `fetch` ```python fetch(self) -> None ``` -#### `add` +#### `add` ```python add(self, data: str | RelatedNode | dict) -> None @@ -65,7 +65,7 @@ add(self, data: str | RelatedNode | dict) -> None Add a new peer to this relationship. -#### `extend` +#### `extend` ```python extend(self, data: Iterable[str | RelatedNode | dict]) -> None @@ -73,25 +73,25 @@ extend(self, data: Iterable[str | RelatedNode | dict]) -> None Add new peers to this relationship. -#### `remove` +#### `remove` ```python remove(self, data: str | RelatedNode | dict) -> None ``` -### `RelationshipManagerSync` +### `RelationshipManagerSync` Manages relationships of a node in a synchronous context. **Methods:** -#### `fetch` +#### `fetch` ```python fetch(self) -> None ``` -#### `add` +#### `add` ```python add(self, data: str | RelatedNodeSync | dict) -> None @@ -99,7 +99,7 @@ add(self, data: str | RelatedNodeSync | dict) -> None Add a new peer to this relationship. -#### `extend` +#### `extend` ```python extend(self, data: Iterable[str | RelatedNodeSync | dict]) -> None @@ -107,7 +107,7 @@ extend(self, data: Iterable[str | RelatedNodeSync | dict]) -> None Add new peers to this relationship. 
-#### `remove` +#### `remove` ```python remove(self, data: str | RelatedNodeSync | dict) -> None diff --git a/specs/001-end-user-cli/contracts/cli-commands.md b/specs/001-end-user-cli/contracts/cli-commands.md index 0abc8aa1..49144cc3 100644 --- a/specs/001-end-user-cli/contracts/cli-commands.md +++ b/specs/001-end-user-cli/contracts/cli-commands.md @@ -3,6 +3,7 @@ ## Global Options All commands accept: + - `--branch TEXT` — Target Infrahub branch (default: from config) - `--config-file PATH` — Configuration file path (default: infrahubctl.toml) - `--output [table|json|csv|yaml]` — Output format (default: table if TTY, json if piped) @@ -10,11 +11,13 @@ All commands accept: ## `infrahub get [identifier]` **List mode** (no identifier): + - Input: kind (positional), --filter (repeatable), --limit INT, --offset INT - Output: Table with columns for each attribute + relationship (display names) - Exit 0: results found | Exit 0: no results (empty table) | Exit 1: invalid kind **Detail mode** (with identifier): + - Input: kind (positional), identifier (positional — UUID or display name) - Output: Key-value display of all attributes, relationships, metadata - Exit 0: found | Exit 1: not found diff --git a/specs/001-end-user-cli/data-model.md b/specs/001-end-user-cli/data-model.md index e83b843f..7baaeb60 100644 --- a/specs/001-end-user-cli/data-model.md +++ b/specs/001-end-user-cli/data-model.md @@ -11,12 +11,14 @@ The CLI introduces transient structures for formatting and serialization: Used when serializing query results to YAML output format. 
**Fields**: + - `apiVersion` (str): Always `"infrahub.app/v1"` - `kind` (str): Always `"Object"` - `spec.kind` (str): The Infrahub Kind being exported (e.g., `"InfraDevice"`) - `spec.data` (list[dict]): Array of serialized node objects Each node in `spec.data` contains: + - Attribute fields as `key: value` pairs - Relationship fields as `key: display_name` (single) or `key: {data: [list]}` (many) @@ -32,6 +34,7 @@ Parses `--set key=value` arguments into a dict suitable for SDK calls. **Output**: `dict[str, str | list[str]]` Validation rules: + - Key MUST exist as an attribute or relationship name in the target Kind's schema - Value is a string; the SDK handles type coercion - For relationships, value is the display name or UUID of the target node @@ -44,5 +47,6 @@ Parses `--filter key=value` arguments into kwargs for `client.filters()`. **Output**: `dict[str, Any]` passed as `**kwargs` Validation rules: + - Key MUST follow the `attribute__value` or `relationship__id` pattern - Invalid keys produce a validation error with available field names From 52c2b0587f38f7b33c1043e4274db8847b08bd09 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 15:48:42 +0000 Subject: [PATCH 05/32] fix: resolve CI failures in integration tests, markdown lint, and docs - Add type coercion in parse_set_args (int, float, bool, null) so --set height=190 sends an integer instead of string "190" - Fix MD032 blanks-around-lists in cli-commands.md and data-model.md - Revert SDK ref docs to stable baseline (CI regenerates with its own mdxify version) - Add error output to integration test assertions for debugging --- infrahub_sdk/ctl/parsers.py | 51 +++++++++++++++++++++++---- tests/integration/test_enduser_cli.py | 4 +-- tests/unit/ctl/test_parsers.py | 50 ++++++++++++++++++++++++++ 3 files changed, 97 insertions(+), 8 deletions(-) diff --git a/infrahub_sdk/ctl/parsers.py b/infrahub_sdk/ctl/parsers.py index e68baa8a..6d7823d7 100644 --- a/infrahub_sdk/ctl/parsers.py +++ 
b/infrahub_sdk/ctl/parsers.py @@ -5,22 +5,61 @@ import typer -def parse_set_args(set_args: list[str]) -> dict[str, str]: +def _coerce_value(value: str) -> Any: + """Attempt to coerce a string value to an appropriate Python type. + + Tries, in order: int, float, bool (true/false), None (null/none). + Falls back to the original string if no conversion matches. + + Args: + value: The raw string value from the CLI. + + Returns: + The coerced Python value. + """ + # Try integer + try: + return int(value) + except ValueError: + pass + + # Try float + try: + return float(value) + except ValueError: + pass + + # Try boolean + lower = value.lower() + if lower in {"true", "yes"}: + return True + if lower in {"false", "no"}: + return False + + # Try null + if lower in {"null", "none"}: + return None + + return value + + +def parse_set_args(set_args: list[str]) -> dict[str, Any]: """Parse --set key=value arguments into a dictionary. Splits each argument on the first ``=`` sign, allowing values - to contain additional ``=`` characters. + to contain additional ``=`` characters. Values are automatically + coerced to int, float, bool, or None where possible. Args: set_args: List of "key=value" strings from the CLI. Returns: - Dictionary mapping field names to string values. + Dictionary mapping field names to coerced Python values. Raises: typer.BadParameter: If any argument is not in key=value format. """ - result: dict[str, str] = {} + result: dict[str, Any] = {} for arg in set_args: if "=" not in arg: raise typer.BadParameter(f"Invalid format '{arg}'. Expected key=value.") @@ -28,7 +67,7 @@ def parse_set_args(set_args: list[str]) -> dict[str, str]: key = key.strip() if not key: raise typer.BadParameter(f"Invalid format '{arg}'. 
Key must not be empty.") - result[key] = value + result[key] = _coerce_value(value) return result @@ -62,7 +101,7 @@ def parse_filter_args(filter_args: list[str]) -> dict[str, Any]: def validate_set_fields( - data: dict[str, str], + data: dict[str, Any], attribute_names: list[str], relationship_names: list[str], ) -> None: diff --git a/tests/integration/test_enduser_cli.py b/tests/integration/test_enduser_cli.py index a01b73d0..0d93b13c 100644 --- a/tests/integration/test_enduser_cli.py +++ b/tests/integration/test_enduser_cli.py @@ -179,7 +179,7 @@ def test_create_inline(self, base_dataset: None) -> None: app, ["create", "TestingPerson", "--set", "name=Integration Test Person", "--set", "height=190"], ) - assert result.exit_code == 0 + assert result.exit_code == 0, f"create failed: {result.output}" assert "Created" in result.stdout async def test_create_inline_verify(self, base_dataset: None, client: InfrahubClient) -> None: @@ -199,7 +199,7 @@ def test_update_inline(self, base_dataset: None) -> None: app, ["update", "TestingPerson", "Sophia Walker", "--set", "height=175"], ) - assert result.exit_code == 0 + assert result.exit_code == 0, f"update failed: {result.output}" assert "Updated" in result.stdout async def test_update_inline_verify(self, base_dataset: None, client: InfrahubClient) -> None: diff --git a/tests/unit/ctl/test_parsers.py b/tests/unit/ctl/test_parsers.py index 1a056716..2cb43f12 100644 --- a/tests/unit/ctl/test_parsers.py +++ b/tests/unit/ctl/test_parsers.py @@ -8,6 +8,50 @@ from infrahub_sdk.ctl.parsers import parse_filter_args, parse_set_args, validate_set_fields +class TestCoerceValue: + """Tests for type coercion via parse_set_args.""" + + def test_integer(self) -> None: + result = parse_set_args(["count=42"]) + assert result["count"] == 42 + assert isinstance(result["count"], int) + + def test_negative_integer(self) -> None: + result = parse_set_args(["offset=-10"]) + assert result["offset"] == -10 + + def test_float(self) -> None: + result 
= parse_set_args(["ratio=2.5"]) + assert result["ratio"] == 2.5 + assert isinstance(result["ratio"], float) + + def test_bool_true(self) -> None: + assert parse_set_args(["flag=true"])["flag"] is True + assert parse_set_args(["flag=True"])["flag"] is True + assert parse_set_args(["flag=yes"])["flag"] is True + + def test_bool_false(self) -> None: + assert parse_set_args(["flag=false"])["flag"] is False + assert parse_set_args(["flag=False"])["flag"] is False + assert parse_set_args(["flag=no"])["flag"] is False + + def test_null(self) -> None: + assert parse_set_args(["field=null"])["field"] is None + assert parse_set_args(["field=none"])["field"] is None + assert parse_set_args(["field=None"])["field"] is None + + def test_string_passthrough(self) -> None: + assert parse_set_args(["name=router1"])["name"] == "router1" + + def test_string_with_spaces(self) -> None: + assert parse_set_args(["name=my device"])["name"] == "my device" + + def test_empty_string(self) -> None: + result = parse_set_args(["name="]) + assert not result["name"] + assert isinstance(result["name"], str) + + class TestParseSetArgs: """Tests for parse_set_args.""" @@ -26,6 +70,12 @@ def test_value_containing_equals_sign(self) -> None: result = parse_set_args(["description=a=b=c"]) assert result == {"description": "a=b=c"} + def test_numeric_value_coerced(self) -> None: + """Test that numeric string values are coerced to int/float.""" + result = parse_set_args(["height=190"]) + assert result == {"height": 190} + assert isinstance(result["height"], int) + def test_empty_list(self) -> None: """Test parse_set_args with an empty list returns an empty dict.""" result = parse_set_args([]) From a941e57cf36cafd7c081d98e2e75c921e6e5891f Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 18:34:20 +0000 Subject: [PATCH 06/32] refactor: move end-user CLI commands under infrahubctl Instead of a separate `infrahub` entry point, register get, create, update, and delete as top-level commands on 
the existing `infrahubctl` CLI. Add schema list and schema show as subcommands of the existing `infrahubctl schema` group alongside load, check, and export. - Remove infrahub entry point from pyproject.toml - Remove enduser_cli.py and enduser_commands.py - Register commands in cli_commands.py - Merge schema list/show into ctl/schema.py - Update all tests to use cli_commands.app --- infrahub_sdk/ctl/cli_commands.py | 8 ++ infrahub_sdk/ctl/commands/schema.py | 132 ------------------------- infrahub_sdk/ctl/enduser_cli.py | 19 ---- infrahub_sdk/ctl/enduser_commands.py | 54 ---------- infrahub_sdk/ctl/schema.py | 110 ++++++++++++++++++++- pyproject.toml | 1 - tests/integration/test_enduser_cli.py | 2 +- tests/unit/ctl/commands/test_create.py | 2 +- tests/unit/ctl/commands/test_delete.py | 2 +- tests/unit/ctl/commands/test_get.py | 2 +- tests/unit/ctl/commands/test_schema.py | 24 ++--- tests/unit/ctl/commands/test_update.py | 2 +- 12 files changed, 134 insertions(+), 224 deletions(-) delete mode 100644 infrahub_sdk/ctl/commands/schema.py delete mode 100644 infrahub_sdk/ctl/enduser_cli.py delete mode 100644 infrahub_sdk/ctl/enduser_commands.py diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py index d7a636ed..78e475c7 100644 --- a/infrahub_sdk/ctl/cli_commands.py +++ b/infrahub_sdk/ctl/cli_commands.py @@ -51,6 +51,10 @@ from ..template.exceptions import JinjaTemplateError from ..utils import write_to_file from ..yaml import SchemaFile +from .commands.create import create_command +from .commands.delete import delete_command +from .commands.get import get_command +from .commands.update import update_command from .exporter import dump from .importer import load from .parameters import CONFIG_PARAM @@ -71,6 +75,10 @@ app.command(name="dump")(dump) app.command(name="load")(load) +app.command(name="get")(get_command) +app.command(name="create")(create_command) +app.command(name="update")(update_command) +app.command(name="delete")(delete_command) 
console = Console() diff --git a/infrahub_sdk/ctl/commands/schema.py b/infrahub_sdk/ctl/commands/schema.py deleted file mode 100644 index 86990b1d..00000000 --- a/infrahub_sdk/ctl/commands/schema.py +++ /dev/null @@ -1,132 +0,0 @@ -"""``infrahub schema`` subcommand group -- explore the Infrahub schema. - -Provides ``list`` and ``show`` subcommands for inspecting schema kinds -and their attributes and relationships. -""" - -from __future__ import annotations - -import typer -from rich.console import Console -from rich.table import Table - -from infrahub_sdk.async_typer import AsyncTyper -from infrahub_sdk.ctl.client import initialize_client -from infrahub_sdk.ctl.parameters import CONFIG_PARAM -from infrahub_sdk.ctl.utils import catch_exception -from infrahub_sdk.schema import NodeSchemaAPI - -app = AsyncTyper() -console = Console() - - -@app.callback() -def callback() -> None: - """Explore the Infrahub schema.""" - - -@app.command(name="list") -@catch_exception(console=console) -async def schema_list( - filter_text: str | None = typer.Option(None, "--filter", help="Filter kinds by name substring"), - branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), - _: str = CONFIG_PARAM, -) -> None: - """List all available schema kinds. - - Fetches the full schema from the Infrahub instance and displays a - table of ``NodeSchemaAPI`` entries. Use ``--filter`` to narrow results - by a case-insensitive substring match on the kind name. - - Args: - filter_text: Optional substring to filter kind names. - branch: Target branch name. - _: Configuration file path (handled by callback). 
- """ - client = initialize_client(branch=branch) - schemas = await client.schema.all(branch=branch) - - items = list(schemas.values()) - if filter_text: - items = [s for s in items if filter_text.lower() in s.kind.lower()] - - items = [s for s in items if isinstance(s, NodeSchemaAPI)] - items.sort(key=lambda s: s.kind) - - table = Table(title="Schema Kinds") - table.add_column("Namespace") - table.add_column("Name") - table.add_column("Kind") - table.add_column("Description") - - for schema in items: - table.add_row( - schema.namespace, - schema.name, - schema.kind, - schema.description or "", - ) - - console.print(table) - - -@app.command(name="show") -@catch_exception(console=console) -async def schema_show( - kind: str = typer.Argument(..., help="Schema kind to display"), - branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), - _: str = CONFIG_PARAM, -) -> None: - """Show details for a specific schema kind. - - Displays metadata, attributes, and relationships for the requested - schema kind in a human-readable format. - - Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - branch: Target branch name. - _: Configuration file path (handled by callback). 
- """ - client = initialize_client(branch=branch) - schema = await client.schema.get(kind=kind, branch=branch) - - console.print(f"\n[bold]{schema.kind}[/bold]") - if schema.description: - console.print(f" {schema.description}") - console.print(f" Namespace: {schema.namespace}") - console.print(f" Display Labels: {schema.display_labels or 'N/A'}") - console.print(f" Human Friendly ID: {schema.human_friendly_id or 'N/A'}") - - if schema.attributes: - attr_table = Table(title="Attributes") - attr_table.add_column("Name") - attr_table.add_column("Type") - attr_table.add_column("Required") - attr_table.add_column("Default") - attr_table.add_column("Description") - - for attr in schema.attributes: - attr_table.add_row( - attr.name, - str(attr.kind), - "Yes" if not attr.optional else "No", - str(attr.default_value) if attr.default_value is not None else "", - attr.description or "", - ) - console.print(attr_table) - - if schema.relationships: - rel_table = Table(title="Relationships") - rel_table.add_column("Name") - rel_table.add_column("Peer") - rel_table.add_column("Cardinality") - rel_table.add_column("Optional") - - for rel in schema.relationships: - rel_table.add_row( - rel.name, - rel.peer, - rel.cardinality, - "Yes" if rel.optional else "No", - ) - console.print(rel_table) diff --git a/infrahub_sdk/ctl/enduser_cli.py b/infrahub_sdk/ctl/enduser_cli.py deleted file mode 100644 index 8f617520..00000000 --- a/infrahub_sdk/ctl/enduser_cli.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Entry point for the ``infrahub`` end-user CLI. - -This module mirrors the pattern used by ``infrahub_sdk.ctl.cli`` but loads -the end-user command set from ``enduser_commands`` instead. -""" - -from __future__ import annotations - -import sys - -try: - from .enduser_commands import app -except ImportError as exc: - sys.exit( - f"Module {exc.name} is not available, install the 'ctl' extra of the infrahub-sdk package, " - f"`pip install 'infrahub-sdk[ctl]'` or run `uv sync --extra ctl`." 
- ) - -__all__ = ["app"] diff --git a/infrahub_sdk/ctl/enduser_commands.py b/infrahub_sdk/ctl/enduser_commands.py deleted file mode 100644 index 7e6f710f..00000000 --- a/infrahub_sdk/ctl/enduser_commands.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Main command registration for the ``infrahub`` end-user CLI. - -Registers top-level commands (get, create, update, delete) and the ``schema`` -subcommand group. A ``--version`` flag is available on the root command. -""" - -from __future__ import annotations - -import typer - -from infrahub_sdk import __version__ as sdk_version -from infrahub_sdk.async_typer import AsyncTyper -from infrahub_sdk.ctl.commands.create import create_command -from infrahub_sdk.ctl.commands.delete import delete_command -from infrahub_sdk.ctl.commands.get import get_command -from infrahub_sdk.ctl.commands.schema import app as schema_app -from infrahub_sdk.ctl.commands.update import update_command - -app = AsyncTyper(pretty_exceptions_show_locals=False) - - -def _version_callback(value: bool) -> None: - """Print the SDK version and exit. - - Args: - value: Whether the ``--version`` flag was passed. - - Raises: - typer.Exit: Always raised after printing the version. 
- """ - if value: - typer.echo(f"infrahub v{sdk_version}") - raise typer.Exit - - -@app.callback(invoke_without_command=True) -def main( - version: bool | None = typer.Option( - None, - "--version", - "-V", - help="Show the SDK version and exit.", - callback=_version_callback, - is_eager=True, - ), -) -> None: - """Infrahub CLI -- interact with an Infrahub instance from the command line.""" - - -app.command(name="get")(get_command) -app.command(name="create")(create_command) -app.command(name="update")(update_command) -app.command(name="delete")(delete_command) -app.add_typer(schema_app, name="schema") diff --git a/infrahub_sdk/ctl/schema.py b/infrahub_sdk/ctl/schema.py index 9532959e..8d5ea73e 100644 --- a/infrahub_sdk/ctl/schema.py +++ b/infrahub_sdk/ctl/schema.py @@ -10,12 +10,13 @@ import yaml from pydantic import ValidationError from rich.console import Console +from rich.table import Table from ..async_typer import AsyncTyper from ..ctl.client import initialize_client from ..ctl.utils import catch_exception, init_logging from ..queries import SCHEMA_HASH_SYNC_STATUS -from ..schema import SchemaWarning +from ..schema import NodeSchemaAPI, SchemaWarning from ..yaml import SchemaFile from .parameters import CONFIG_PARAM from .utils import load_yamlfile_from_disk_and_exit @@ -258,3 +259,110 @@ async def export( console.print(f"[green] Exported namespace '{ns}' to {output_file}") console.print(f"[green] Schema exported to {directory}") + + +@app.command(name="list") +@catch_exception(console=console) +async def schema_list( + filter_text: str | None = typer.Option(None, "--filter", help="Filter kinds by name substring"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """List all available schema kinds. + + Fetches the full schema from the Infrahub instance and displays a + table of node schema entries. 
Use ``--filter`` to narrow results + by a case-insensitive substring match on the kind name. + + Args: + filter_text: Optional substring to filter kind names. + branch: Target branch name. + _: Configuration file path (handled by callback). + """ + client = initialize_client(branch=branch) + schemas = await client.schema.all(branch=branch) + + items = list(schemas.values()) + if filter_text: + items = [s for s in items if filter_text.lower() in s.kind.lower()] + + items = [s for s in items if isinstance(s, NodeSchemaAPI)] + items.sort(key=lambda s: s.kind) + + table = Table(title="Schema Kinds") + table.add_column("Namespace") + table.add_column("Name") + table.add_column("Kind") + table.add_column("Description") + + for schema_item in items: + table.add_row( + schema_item.namespace, + schema_item.name, + schema_item.kind, + schema_item.description or "", + ) + + console.print(table) + + +@app.command(name="show") +@catch_exception(console=console) +async def schema_show( + kind: str = typer.Argument(..., help="Schema kind to display"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """Show details for a specific schema kind. + + Displays metadata, attributes, and relationships for the requested + schema kind in a human-readable format. + + Args: + kind: Infrahub schema kind (e.g. ``InfraDevice``). + branch: Target branch name. + _: Configuration file path (handled by callback). 
+ """ + client = initialize_client(branch=branch) + node_schema = await client.schema.get(kind=kind, branch=branch) + + console.print(f"\n[bold]{node_schema.kind}[/bold]") + if node_schema.description: + console.print(f" {node_schema.description}") + console.print(f" Namespace: {node_schema.namespace}") + console.print(f" Display Labels: {node_schema.display_labels or 'N/A'}") + console.print(f" Human Friendly ID: {node_schema.human_friendly_id or 'N/A'}") + + if node_schema.attributes: + attr_table = Table(title="Attributes") + attr_table.add_column("Name") + attr_table.add_column("Type") + attr_table.add_column("Required") + attr_table.add_column("Default") + attr_table.add_column("Description") + + for attr in node_schema.attributes: + attr_table.add_row( + attr.name, + str(attr.kind), + "Yes" if not attr.optional else "No", + str(attr.default_value) if attr.default_value is not None else "", + attr.description or "", + ) + console.print(attr_table) + + if node_schema.relationships: + rel_table = Table(title="Relationships") + rel_table.add_column("Name") + rel_table.add_column("Peer") + rel_table.add_column("Cardinality") + rel_table.add_column("Optional") + + for rel in node_schema.relationships: + rel_table.add_row( + rel.name, + rel.peer, + rel.cardinality, + "Yes" if rel.optional else "No", + ) + console.print(rel_table) diff --git a/pyproject.toml b/pyproject.toml index 9b7f0805..4e0716a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,6 @@ Documentation = "https://docs.infrahub.app/python-sdk/introduction" [project.scripts] infrahubctl = "infrahub_sdk.ctl.cli:app" -infrahub = "infrahub_sdk.ctl.enduser_cli:app" [project.entry-points."pytest11"] "pytest-infrahub" = "infrahub_sdk.pytest_plugin.plugin" diff --git a/tests/integration/test_enduser_cli.py b/tests/integration/test_enduser_cli.py index 0d93b13c..d6ebfa02 100644 --- a/tests/integration/test_enduser_cli.py +++ b/tests/integration/test_enduser_cli.py @@ -16,7 +16,7 @@ from typer.testing 
import CliRunner from infrahub_sdk.ctl import config -from infrahub_sdk.ctl.enduser_cli import app +from infrahub_sdk.ctl.cli_commands import app from infrahub_sdk.ctl.parameters import load_configuration from infrahub_sdk.testing.docker import TestInfrahubDockerClient from infrahub_sdk.testing.schemas.animal import SchemaAnimal diff --git a/tests/unit/ctl/commands/test_create.py b/tests/unit/ctl/commands/test_create.py index 83d937bf..05f81a43 100644 --- a/tests/unit/ctl/commands/test_create.py +++ b/tests/unit/ctl/commands/test_create.py @@ -7,7 +7,7 @@ import pytest from typer.testing import CliRunner -from infrahub_sdk.ctl.enduser_cli import app +from infrahub_sdk.ctl.cli_commands import app runner = CliRunner() diff --git a/tests/unit/ctl/commands/test_delete.py b/tests/unit/ctl/commands/test_delete.py index 4f5c5d0f..6bc9caaa 100644 --- a/tests/unit/ctl/commands/test_delete.py +++ b/tests/unit/ctl/commands/test_delete.py @@ -6,7 +6,7 @@ from typer.testing import CliRunner -from infrahub_sdk.ctl.enduser_cli import app +from infrahub_sdk.ctl.cli_commands import app runner = CliRunner() diff --git a/tests/unit/ctl/commands/test_get.py b/tests/unit/ctl/commands/test_get.py index 4b7e4975..17c43e33 100644 --- a/tests/unit/ctl/commands/test_get.py +++ b/tests/unit/ctl/commands/test_get.py @@ -7,7 +7,7 @@ import pytest from typer.testing import CliRunner -from infrahub_sdk.ctl.enduser_cli import app +from infrahub_sdk.ctl.cli_commands import app runner = CliRunner() diff --git a/tests/unit/ctl/commands/test_schema.py b/tests/unit/ctl/commands/test_schema.py index 4303d57a..d1d60d21 100644 --- a/tests/unit/ctl/commands/test_schema.py +++ b/tests/unit/ctl/commands/test_schema.py @@ -6,7 +6,7 @@ from typer.testing import CliRunner -from infrahub_sdk.ctl.enduser_cli import app +from infrahub_sdk.ctl.cli_commands import app from infrahub_sdk.schema import NodeSchemaAPI runner = CliRunner() @@ -112,7 +112,7 @@ def test_schema_list_returns_table() -> None: mock_client = 
MagicMock() mock_client.schema.all = AsyncMock(return_value={"InfraDevice": device_schema, "InfraInterface": interface_schema}) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "list"]) assert result.exit_code == 0, result.stdout @@ -129,7 +129,7 @@ def test_schema_list_with_filter() -> None: mock_client = MagicMock() mock_client.schema.all = AsyncMock(return_value={"InfraDevice": device_schema, "IpamPrefix": prefix_schema}) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "list", "--filter", "infra"]) assert result.exit_code == 0, result.stdout @@ -142,7 +142,7 @@ def test_schema_list_empty() -> None: mock_client = MagicMock() mock_client.schema.all = AsyncMock(return_value={}) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "list"]) assert result.exit_code == 0, result.stdout @@ -155,7 +155,7 @@ def test_schema_list_with_branch() -> None: mock_client = MagicMock() mock_client.schema.all = AsyncMock(return_value={"CoreAccount": schema}) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "list", "--branch", "feature-x"]) assert result.exit_code == 0, result.stdout @@ -172,7 +172,7 @@ def test_schema_list_skips_non_node_schema_entries() -> None: mock_client = MagicMock() mock_client.schema.all = AsyncMock(return_value={"InfraDevice": node_schema, "SomeGenericKind": generic_schema}) - with 
patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "list"]) assert result.exit_code == 0, result.stdout @@ -225,7 +225,7 @@ def test_schema_show_displays_metadata() -> None: mock_client = MagicMock() mock_client.schema.get = AsyncMock(return_value=schema) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "show", "InfraDevice"]) assert result.exit_code == 0, result.stdout @@ -245,7 +245,7 @@ def test_schema_show_displays_attributes() -> None: mock_client = MagicMock() mock_client.schema.get = AsyncMock(return_value=schema) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "show", "InfraDevice"]) assert result.exit_code == 0, result.stdout @@ -269,7 +269,7 @@ def test_schema_show_displays_relationships() -> None: mock_client = MagicMock() mock_client.schema.get = AsyncMock(return_value=schema) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "show", "InfraDevice"]) assert result.exit_code == 0, result.stdout @@ -288,7 +288,7 @@ def test_schema_show_no_attributes_or_relationships() -> None: mock_client = MagicMock() mock_client.schema.get = AsyncMock(return_value=schema) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "show", "InfraDevice"]) assert 
result.exit_code == 0, result.stdout @@ -302,7 +302,7 @@ def test_schema_show_with_branch() -> None: mock_client = MagicMock() mock_client.schema.get = AsyncMock(return_value=schema) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "show", "InfraDevice", "--branch", "feature-x"]) assert result.exit_code == 0, result.stdout @@ -316,7 +316,7 @@ def test_schema_show_attribute_with_default_value() -> None: mock_client = MagicMock() mock_client.schema.get = AsyncMock(return_value=schema) - with patch("infrahub_sdk.ctl.commands.schema.initialize_client", return_value=mock_client): + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): result = runner.invoke(app, ["schema", "show", "InfraDevice"]) assert result.exit_code == 0, result.stdout diff --git a/tests/unit/ctl/commands/test_update.py b/tests/unit/ctl/commands/test_update.py index d19aaf0d..549179e3 100644 --- a/tests/unit/ctl/commands/test_update.py +++ b/tests/unit/ctl/commands/test_update.py @@ -7,7 +7,7 @@ import pytest from typer.testing import CliRunner -from infrahub_sdk.ctl.enduser_cli import app +from infrahub_sdk.ctl.cli_commands import app runner = CliRunner() From b8d30b4f0676b7168fb5674c038c1869e84e2f81 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 19:10:26 +0000 Subject: [PATCH 07/32] fix: resolve doc generation and integration test failures - Expose command functions as module-level names (get, create, update, delete) in cli_commands.py so typer --func can find them for doc gen - Generate CLI docs for new commands (get, create, update, delete) and updated schema docs (list, show subcommands added) - Fix test_version to use 'version' subcommand instead of --version flag --- docs/docs/infrahubctl/infrahubctl-create.mdx | 34 ++++++++++++ docs/docs/infrahubctl/infrahubctl-delete.mdx | 34 
++++++++++++ docs/docs/infrahubctl/infrahubctl-get.mdx | 40 ++++++++++++++ docs/docs/infrahubctl/infrahubctl-schema.mdx | 56 ++++++++++++++++++++ docs/docs/infrahubctl/infrahubctl-update.mdx | 38 +++++++++++++ infrahub_sdk/ctl/cli_commands.py | 23 +++++--- tests/integration/test_enduser_cli.py | 6 +-- 7 files changed, 220 insertions(+), 11 deletions(-) create mode 100644 docs/docs/infrahubctl/infrahubctl-create.mdx create mode 100644 docs/docs/infrahubctl/infrahubctl-delete.mdx create mode 100644 docs/docs/infrahubctl/infrahubctl-get.mdx create mode 100644 docs/docs/infrahubctl/infrahubctl-update.mdx diff --git a/docs/docs/infrahubctl/infrahubctl-create.mdx b/docs/docs/infrahubctl/infrahubctl-create.mdx new file mode 100644 index 00000000..78796027 --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-create.mdx @@ -0,0 +1,34 @@ +# `infrahubctl create` + +Create a new object in Infrahub. + +Supports two mutually exclusive modes: inline field assignment via +repeatable ``--set key=value`` options, or bulk creation from a +JSON/YAML object file via ``--file``. + +Args: + kind: The Infrahub schema kind to create (e.g. ``InfraDevice``). + set_args: Repeatable ``key=value`` pairs for inline field assignment. + file: Path to a JSON or YAML object file. + branch: Target branch for the operation. + _: Configuration file parameter (handled by callback). + +**Usage**: + +```console +$ infrahubctl create [OPTIONS] KIND +``` + +**Arguments**: + +* `KIND`: Infrahub schema kind to create [required] + +**Options**: + +* `--set TEXT`: Field value in key=value format +* `-f, --file PATH`: JSON or YAML file with object data +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. 
diff --git a/docs/docs/infrahubctl/infrahubctl-delete.mdx b/docs/docs/infrahubctl/infrahubctl-delete.mdx new file mode 100644 index 00000000..138c7bd0 --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-delete.mdx @@ -0,0 +1,34 @@ +# `infrahubctl delete` + +Delete an Infrahub object. + +Fetches the object identified by *identifier* (UUID or display name) and +deletes it. Unless ``--yes`` is provided, the user is prompted for +confirmation before the deletion is executed. + +Args: + kind: Infrahub schema kind (e.g. ``InfraDevice``). + identifier: UUID or display name of the object to delete. + yes: When ``True``, skip the interactive confirmation prompt. + branch: Target branch name. + _: Configuration file path (handled by callback). + +**Usage**: + +```console +$ infrahubctl delete [OPTIONS] KIND IDENTIFIER +``` + +**Arguments**: + +* `KIND`: Infrahub schema kind [required] +* `IDENTIFIER`: Object ID or display name [required] + +**Options**: + +* `-y, --yes`: Skip confirmation prompt +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-get.mdx b/docs/docs/infrahubctl/infrahubctl-get.mdx new file mode 100644 index 00000000..cebbebe8 --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-get.mdx @@ -0,0 +1,40 @@ +# `infrahubctl get` + +Query and display Infrahub objects. + +When *identifier* is omitted, lists all objects of the given *kind*, +optionally filtered by ``--filter`` arguments. When *identifier* is +provided, displays a single object in detail view. + +Args: + kind: Infrahub schema kind (e.g. ``InfraDevice``). + identifier: UUID or display name for single-object lookup. 
+ filter_args: Repeatable filters in ``attr__value=x`` format. + output: Explicit output format; auto-detected when omitted. + branch: Target branch name. + limit: Maximum number of results to return. + offset: Number of results to skip (pagination). + _: Configuration file path (handled by callback). + +**Usage**: + +```console +$ infrahubctl get [OPTIONS] KIND [IDENTIFIER] +``` + +**Arguments**: + +* `KIND`: Infrahub schema kind to query [required] +* `[IDENTIFIER]`: Object ID or display name for detail view + +**Options**: + +* `--filter TEXT`: Filter in attr__value=x format +* `-o, --output [table|json|csv|yaml]`: Output format +* `-b, --branch TEXT`: Target branch +* `--limit INTEGER`: Maximum results +* `--offset INTEGER`: Skip first N results +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-schema.mdx b/docs/docs/infrahubctl/infrahubctl-schema.mdx index 1467eae8..2aa9461c 100644 --- a/docs/docs/infrahubctl/infrahubctl-schema.mdx +++ b/docs/docs/infrahubctl/infrahubctl-schema.mdx @@ -18,7 +18,9 @@ $ infrahubctl schema [OPTIONS] COMMAND [ARGS]... * `check`: Check if schema files are valid and what... * `export`: Export the schema from Infrahub as YAML... +* `list`: List all available schema kinds. * `load`: Load one or multiple schema files into... +* `show`: Show details for a specific schema kind. ## `infrahubctl schema check` @@ -60,6 +62,32 @@ $ infrahubctl schema export [OPTIONS] * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. +## `infrahubctl schema list` + +List all available schema kinds. + +Fetches the full schema from the Infrahub instance and displays a +table of node schema entries. 
Use ``--filter`` to narrow results +by a case-insensitive substring match on the kind name. + +Args: + filter_text: Optional substring to filter kind names. + branch: Target branch name. + _: Configuration file path (handled by callback). + +**Usage**: + +```console +$ infrahubctl schema list [OPTIONS] +``` + +**Options**: + +* `--filter TEXT`: Filter kinds by name substring +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--help`: Show this message and exit. + ## `infrahubctl schema load` Load one or multiple schema files into Infrahub. @@ -81,3 +109,31 @@ $ infrahubctl schema load [OPTIONS] SCHEMAS... * `--wait INTEGER`: Time in seconds to wait until the schema has converged across all workers [default: 0] * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. + +## `infrahubctl schema show` + +Show details for a specific schema kind. + +Displays metadata, attributes, and relationships for the requested +schema kind in a human-readable format. + +Args: + kind: Infrahub schema kind (e.g. ``InfraDevice``). + branch: Target branch name. + _: Configuration file path (handled by callback). + +**Usage**: + +```console +$ infrahubctl schema show [OPTIONS] KIND +``` + +**Arguments**: + +* `KIND`: Schema kind to display [required] + +**Options**: + +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-update.mdx b/docs/docs/infrahubctl/infrahubctl-update.mdx new file mode 100644 index 00000000..87922ecd --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-update.mdx @@ -0,0 +1,38 @@ +# `infrahubctl update` + +Update an existing object in Infrahub. 
+ +Fetches the object identified by KIND and IDENTIFIER, applies the +requested changes, and saves the updated object back to the server. + +Changes can be provided either as repeatable ``--set key=value`` +flags or via a ``--file`` pointing to a YAML/JSON object file. +The two modes are mutually exclusive. + +Args: + kind: Infrahub schema kind (e.g. ``InfraDevice``). + identifier: Object UUID or human-readable display name. + set_args: Repeatable key=value pairs for inline field updates. + file: Path to a YAML or JSON object file with update data. + branch: Target branch for the operation. + +**Usage**: + +```console +$ infrahubctl update [OPTIONS] KIND IDENTIFIER +``` + +**Arguments**: + +* `KIND`: Infrahub schema kind [required] +* `IDENTIFIER`: Object ID or display name [required] + +**Options**: + +* `--set TEXT`: Field value in key=value format +* `-f, --file PATH`: JSON or YAML file with update data +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. 
diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py index 78e475c7..292bebdb 100644 --- a/infrahub_sdk/ctl/cli_commands.py +++ b/infrahub_sdk/ctl/cli_commands.py @@ -11,14 +11,14 @@ from pathlib import Path from typing import TYPE_CHECKING, Any -import typer -import ujson -from rich.console import Console -from rich.layout import Layout -from rich.logging import RichHandler -from rich.panel import Panel -from rich.pretty import Pretty -from rich.table import Table +import typer # pyright: ignore[reportMissingImports] +import ujson # pyright: ignore[reportMissingModuleSource] +from rich.console import Console # pyright: ignore[reportMissingImports] +from rich.layout import Layout # pyright: ignore[reportMissingImports] +from rich.logging import RichHandler # pyright: ignore[reportMissingImports] +from rich.panel import Panel # pyright: ignore[reportMissingImports] +from rich.pretty import Pretty # pyright: ignore[reportMissingImports] +from rich.table import Table # pyright: ignore[reportMissingImports] from .. 
import __version__ as sdk_version from ..async_typer import AsyncTyper @@ -80,6 +80,13 @@ app.command(name="update")(update_command) app.command(name="delete")(delete_command) +# Expose command functions under their command names for typer doc generation +# (typer --func looks up module-level names) +get = get_command +create = create_command +update = update_command +delete = delete_command + console = Console() diff --git a/tests/integration/test_enduser_cli.py b/tests/integration/test_enduser_cli.py index d6ebfa02..4e90ba9c 100644 --- a/tests/integration/test_enduser_cli.py +++ b/tests/integration/test_enduser_cli.py @@ -71,10 +71,10 @@ class TestEnduserCliRead(_EnduserCliBase): """Read-only CLI tests: version, schema discovery, and get queries.""" def test_version(self) -> None: - """Verify the --version flag works without a server.""" - result = runner.invoke(app, ["--version"]) + """Verify the version subcommand works without a server.""" + result = runner.invoke(app, ["version"]) assert result.exit_code == 0 - assert "infrahub v" in result.stdout + assert "SDK" in result.stdout def test_schema_list(self, base_dataset: None) -> None: """List schema kinds and verify TestingPerson is present.""" From 4f465f92da51eb1481c261cc8a4e591e2147795e Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 19:17:02 +0000 Subject: [PATCH 08/32] fix: resolve vale spelling errors in generated CLI docs Remove Args sections from command docstrings since typer generates help text from Option/Argument annotations. The Args sections leaked Python parameter names (filter_args, set_args, filter_text) and technical terms (substring) into the generated MDX docs, triggering vale spelling errors in CI. 
--- docs/docs/infrahubctl/infrahubctl-create.mdx | 12 ++---------- docs/docs/infrahubctl/infrahubctl-delete.mdx | 12 ++---------- docs/docs/infrahubctl/infrahubctl-get.mdx | 15 ++------------- docs/docs/infrahubctl/infrahubctl-schema.mdx | 17 +++-------------- docs/docs/infrahubctl/infrahubctl-update.mdx | 12 +----------- infrahub_sdk/ctl/commands/create.py | 12 ++---------- infrahub_sdk/ctl/commands/delete.py | 12 ++---------- infrahub_sdk/ctl/commands/get.py | 15 ++------------- infrahub_sdk/ctl/commands/update.py | 12 +----------- infrahub_sdk/ctl/schema.py | 17 +++-------------- 10 files changed, 20 insertions(+), 116 deletions(-) diff --git a/docs/docs/infrahubctl/infrahubctl-create.mdx b/docs/docs/infrahubctl/infrahubctl-create.mdx index 78796027..b7fcb512 100644 --- a/docs/docs/infrahubctl/infrahubctl-create.mdx +++ b/docs/docs/infrahubctl/infrahubctl-create.mdx @@ -2,16 +2,8 @@ Create a new object in Infrahub. -Supports two mutually exclusive modes: inline field assignment via -repeatable ``--set key=value`` options, or bulk creation from a -JSON/YAML object file via ``--file``. - -Args: - kind: The Infrahub schema kind to create (e.g. ``InfraDevice``). - set_args: Repeatable ``key=value`` pairs for inline field assignment. - file: Path to a JSON or YAML object file. - branch: Target branch for the operation. - _: Configuration file parameter (handled by callback). +Provide field values with repeatable --set flags or supply a +JSON/YAML object file via --file. The two modes are mutually exclusive. **Usage**: diff --git a/docs/docs/infrahubctl/infrahubctl-delete.mdx b/docs/docs/infrahubctl/infrahubctl-delete.mdx index 138c7bd0..7f619ed4 100644 --- a/docs/docs/infrahubctl/infrahubctl-delete.mdx +++ b/docs/docs/infrahubctl/infrahubctl-delete.mdx @@ -2,16 +2,8 @@ Delete an Infrahub object. -Fetches the object identified by *identifier* (UUID or display name) and -deletes it. 
Unless ``--yes`` is provided, the user is prompted for -confirmation before the deletion is executed. - -Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - identifier: UUID or display name of the object to delete. - yes: When ``True``, skip the interactive confirmation prompt. - branch: Target branch name. - _: Configuration file path (handled by callback). +Fetches the object by KIND and IDENTIFIER, then deletes it. +Unless --yes is provided, a confirmation prompt is shown first. **Usage**: diff --git a/docs/docs/infrahubctl/infrahubctl-get.mdx b/docs/docs/infrahubctl/infrahubctl-get.mdx index cebbebe8..548e0bea 100644 --- a/docs/docs/infrahubctl/infrahubctl-get.mdx +++ b/docs/docs/infrahubctl/infrahubctl-get.mdx @@ -2,19 +2,8 @@ Query and display Infrahub objects. -When *identifier* is omitted, lists all objects of the given *kind*, -optionally filtered by ``--filter`` arguments. When *identifier* is -provided, displays a single object in detail view. - -Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - identifier: UUID or display name for single-object lookup. - filter_args: Repeatable filters in ``attr__value=x`` format. - output: Explicit output format; auto-detected when omitted. - branch: Target branch name. - limit: Maximum number of results to return. - offset: Number of results to skip (pagination). - _: Configuration file path (handled by callback). +When IDENTIFIER is omitted, lists all objects of the given KIND. +When IDENTIFIER is provided, displays a single object in detail view. **Usage**: diff --git a/docs/docs/infrahubctl/infrahubctl-schema.mdx b/docs/docs/infrahubctl/infrahubctl-schema.mdx index 2aa9461c..25500f0e 100644 --- a/docs/docs/infrahubctl/infrahubctl-schema.mdx +++ b/docs/docs/infrahubctl/infrahubctl-schema.mdx @@ -66,14 +66,8 @@ $ infrahubctl schema export [OPTIONS] List all available schema kinds. -Fetches the full schema from the Infrahub instance and displays a -table of node schema entries. 
Use ``--filter`` to narrow results -by a case-insensitive substring match on the kind name. - -Args: - filter_text: Optional substring to filter kind names. - branch: Target branch name. - _: Configuration file path (handled by callback). +Displays a table of all node schema entries. Use --filter to narrow +results by a case-insensitive match on the kind name. **Usage**: @@ -83,7 +77,7 @@ $ infrahubctl schema list [OPTIONS] **Options**: -* `--filter TEXT`: Filter kinds by name substring +* `--filter TEXT`: Filter kinds by name * `-b, --branch TEXT`: Target branch * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. @@ -117,11 +111,6 @@ Show details for a specific schema kind. Displays metadata, attributes, and relationships for the requested schema kind in a human-readable format. -Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - branch: Target branch name. - _: Configuration file path (handled by callback). - **Usage**: ```console diff --git a/docs/docs/infrahubctl/infrahubctl-update.mdx b/docs/docs/infrahubctl/infrahubctl-update.mdx index 87922ecd..6147d7cb 100644 --- a/docs/docs/infrahubctl/infrahubctl-update.mdx +++ b/docs/docs/infrahubctl/infrahubctl-update.mdx @@ -4,17 +4,7 @@ Update an existing object in Infrahub. Fetches the object identified by KIND and IDENTIFIER, applies the requested changes, and saves the updated object back to the server. - -Changes can be provided either as repeatable ``--set key=value`` -flags or via a ``--file`` pointing to a YAML/JSON object file. -The two modes are mutually exclusive. - -Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - identifier: Object UUID or human-readable display name. - set_args: Repeatable key=value pairs for inline field updates. - file: Path to a YAML or JSON object file with update data. - branch: Target branch for the operation. +Provide field values with repeatable --set flags or supply a file via --file. 
**Usage**: diff --git a/infrahub_sdk/ctl/commands/create.py b/infrahub_sdk/ctl/commands/create.py index 8e9f8034..5c661902 100644 --- a/infrahub_sdk/ctl/commands/create.py +++ b/infrahub_sdk/ctl/commands/create.py @@ -30,16 +30,8 @@ async def create_command( ) -> None: """Create a new object in Infrahub. - Supports two mutually exclusive modes: inline field assignment via - repeatable ``--set key=value`` options, or bulk creation from a - JSON/YAML object file via ``--file``. - - Args: - kind: The Infrahub schema kind to create (e.g. ``InfraDevice``). - set_args: Repeatable ``key=value`` pairs for inline field assignment. - file: Path to a JSON or YAML object file. - branch: Target branch for the operation. - _: Configuration file parameter (handled by callback). + Provide field values with repeatable --set flags or supply a + JSON/YAML object file via --file. The two modes are mutually exclusive. """ if set_args and file: raise typer.BadParameter("--set and --file are mutually exclusive. Use one or the other.") diff --git a/infrahub_sdk/ctl/commands/delete.py b/infrahub_sdk/ctl/commands/delete.py index 79e18b14..224d0b94 100644 --- a/infrahub_sdk/ctl/commands/delete.py +++ b/infrahub_sdk/ctl/commands/delete.py @@ -25,16 +25,8 @@ async def delete_command( ) -> None: """Delete an Infrahub object. - Fetches the object identified by *identifier* (UUID or display name) and - deletes it. Unless ``--yes`` is provided, the user is prompted for - confirmation before the deletion is executed. - - Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - identifier: UUID or display name of the object to delete. - yes: When ``True``, skip the interactive confirmation prompt. - branch: Target branch name. - _: Configuration file path (handled by callback). + Fetches the object by KIND and IDENTIFIER, then deletes it. + Unless --yes is provided, a confirmation prompt is shown first. 
""" client = initialize_client(branch=branch) node = await client.get(kind=kind, id=identifier) diff --git a/infrahub_sdk/ctl/commands/get.py b/infrahub_sdk/ctl/commands/get.py index e7660792..56e25743 100644 --- a/infrahub_sdk/ctl/commands/get.py +++ b/infrahub_sdk/ctl/commands/get.py @@ -35,19 +35,8 @@ async def get_command( ) -> None: """Query and display Infrahub objects. - When *identifier* is omitted, lists all objects of the given *kind*, - optionally filtered by ``--filter`` arguments. When *identifier* is - provided, displays a single object in detail view. - - Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - identifier: UUID or display name for single-object lookup. - filter_args: Repeatable filters in ``attr__value=x`` format. - output: Explicit output format; auto-detected when omitted. - branch: Target branch name. - limit: Maximum number of results to return. - offset: Number of results to skip (pagination). - _: Configuration file path (handled by callback). + When IDENTIFIER is omitted, lists all objects of the given KIND. + When IDENTIFIER is provided, displays a single object in detail view. """ client = initialize_client(branch=branch) schema = await client.schema.get(kind=kind, branch=branch) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index 4f9277b9..38e730c3 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -37,17 +37,7 @@ async def update_command( Fetches the object identified by KIND and IDENTIFIER, applies the requested changes, and saves the updated object back to the server. - - Changes can be provided either as repeatable ``--set key=value`` - flags or via a ``--file`` pointing to a YAML/JSON object file. - The two modes are mutually exclusive. - - Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - identifier: Object UUID or human-readable display name. - set_args: Repeatable key=value pairs for inline field updates. 
- file: Path to a YAML or JSON object file with update data. - branch: Target branch for the operation. + Provide field values with repeatable --set flags or supply a file via --file. """ if set_args and file: raise typer.BadParameter("--set and --file are mutually exclusive.") diff --git a/infrahub_sdk/ctl/schema.py b/infrahub_sdk/ctl/schema.py index 8d5ea73e..936c9897 100644 --- a/infrahub_sdk/ctl/schema.py +++ b/infrahub_sdk/ctl/schema.py @@ -264,20 +264,14 @@ async def export( @app.command(name="list") @catch_exception(console=console) async def schema_list( - filter_text: str | None = typer.Option(None, "--filter", help="Filter kinds by name substring"), + filter_text: str | None = typer.Option(None, "--filter", help="Filter kinds by name"), branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), _: str = CONFIG_PARAM, ) -> None: """List all available schema kinds. - Fetches the full schema from the Infrahub instance and displays a - table of node schema entries. Use ``--filter`` to narrow results - by a case-insensitive substring match on the kind name. - - Args: - filter_text: Optional substring to filter kind names. - branch: Target branch name. - _: Configuration file path (handled by callback). + Displays a table of all node schema entries. Use --filter to narrow + results by a case-insensitive match on the kind name. """ client = initialize_client(branch=branch) schemas = await client.schema.all(branch=branch) @@ -317,11 +311,6 @@ async def schema_show( Displays metadata, attributes, and relationships for the requested schema kind in a human-readable format. - - Args: - kind: Infrahub schema kind (e.g. ``InfraDevice``). - branch: Target branch name. - _: Configuration file path (handled by callback). 
""" client = initialize_client(branch=branch) node_schema = await client.schema.get(kind=kind, branch=branch) From 0c2ecc5da5e3d00ff742e08d3436176cf299e717 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 19:38:35 +0000 Subject: [PATCH 09/32] feat: hide empty columns by default in table and CSV output Add --all-columns flag to `infrahubctl get`. By default, columns where every value is empty are suppressed in table and CSV output, making wide schemas with sparse data much more readable. Use --all-columns to show every column. JSON and YAML output always includes all fields. --- infrahub_sdk/ctl/commands/get.py | 10 +++++++--- infrahub_sdk/ctl/formatters/base.py | 21 ++++++++++++++++++++- infrahub_sdk/ctl/formatters/csv.py | 19 +++++++++++++------ infrahub_sdk/ctl/formatters/json.py | 8 +++++++- infrahub_sdk/ctl/formatters/table.py | 19 +++++++++++++------ infrahub_sdk/ctl/formatters/yaml.py | 8 +++++++- tests/unit/ctl/commands/test_get.py | 2 +- tests/unit/ctl/formatters/test_csv.py | 3 ++- 8 files changed, 70 insertions(+), 20 deletions(-) diff --git a/infrahub_sdk/ctl/commands/get.py b/infrahub_sdk/ctl/commands/get.py index 56e25743..5f432309 100644 --- a/infrahub_sdk/ctl/commands/get.py +++ b/infrahub_sdk/ctl/commands/get.py @@ -10,8 +10,8 @@ from typing import Any -import typer -from rich.console import Console +import typer # pyright: ignore[reportMissingImports] +from rich.console import Console # pyright: ignore[reportMissingImports] from infrahub_sdk.ctl.client import initialize_client from infrahub_sdk.ctl.formatters import OutputFormat, detect_output_format, get_formatter @@ -31,12 +31,16 @@ async def get_command( branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), limit: int | None = typer.Option(None, "--limit", help="Maximum results"), offset: int | None = typer.Option(None, "--offset", help="Skip first N results"), + all_columns: bool = typer.Option(False, "--all-columns", help="Show all columns including 
empty ones"), _: str = CONFIG_PARAM, ) -> None: """Query and display Infrahub objects. When IDENTIFIER is omitted, lists all objects of the given KIND. When IDENTIFIER is provided, displays a single object in detail view. + + By default, columns where every value is empty are hidden in table + and CSV output. Use --all-columns to show them. """ client = initialize_client(branch=branch) schema = await client.schema.get(kind=kind, branch=branch) @@ -56,7 +60,7 @@ async def get_command( limit=limit, prefetch_relationships=True, ) - result = formatter.format_list(nodes, schema) + result = formatter.format_list(nodes, schema, show_all_columns=all_columns) if fmt == OutputFormat.TABLE: console.print(result, highlight=False) diff --git a/infrahub_sdk/ctl/formatters/base.py b/infrahub_sdk/ctl/formatters/base.py index 74fc389e..8ab5edf4 100644 --- a/infrahub_sdk/ctl/formatters/base.py +++ b/infrahub_sdk/ctl/formatters/base.py @@ -16,12 +16,18 @@ class BaseFormatter(Protocol): for display in various output formats (table, JSON, CSV, YAML). """ - def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> str: + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, + ) -> str: """Format a list of nodes for display. Args: nodes: List of InfrahubNode objects to format. schema: Schema definition for the node kind. + show_all_columns: When True, include columns where every value is empty. Returns: Formatted string representation of all nodes. @@ -100,6 +106,19 @@ def extract_node_data( return data +def non_empty_columns(rows: list[dict[str, Any]], columns: list[str]) -> list[str]: + """Return only columns that have at least one non-empty value across all rows. + + Args: + rows: List of row dicts (from extract_node_data). + columns: All candidate column names. + + Returns: + Filtered list of column names with data. 
+ """ + return [col for col in columns if any(str(row.get(col, "")).strip() for row in rows)] + + def extract_node_detail( node: InfrahubNode, schema: MainSchemaTypesAPI, diff --git a/infrahub_sdk/ctl/formatters/csv.py b/infrahub_sdk/ctl/formatters/csv.py index 1723c8a9..b266a7a7 100644 --- a/infrahub_sdk/ctl/formatters/csv.py +++ b/infrahub_sdk/ctl/formatters/csv.py @@ -6,7 +6,7 @@ import io from typing import TYPE_CHECKING -from .base import extract_node_data, extract_node_detail +from .base import extract_node_data, extract_node_detail, non_empty_columns if TYPE_CHECKING: from ...node import InfrahubNode @@ -19,7 +19,12 @@ class CsvFormatter: Uses stdlib csv module for proper escaping and quoting of values. """ - def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> str: + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, + ) -> str: """Format a list of nodes as CSV with a header row. Columns correspond to schema attribute and relationship names. @@ -28,18 +33,20 @@ def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> Args: nodes: List of InfrahubNode objects to format. schema: Schema definition for the node kind. + show_all_columns: When True, include columns where every value is empty. Returns: CSV string with header and data rows. 
""" - columns = schema.attribute_names + schema.relationship_names + all_columns = schema.attribute_names + schema.relationship_names + rows = [extract_node_data(node, schema) for node in nodes] + columns = all_columns if show_all_columns else non_empty_columns(rows, all_columns) + output = io.StringIO() writer = csv.writer(output) - writer.writerow(columns) - for node in nodes: - row_data = extract_node_data(node, schema) + for row_data in rows: writer.writerow([str(row_data.get(col, "")) for col in columns]) return output.getvalue() diff --git a/infrahub_sdk/ctl/formatters/json.py b/infrahub_sdk/ctl/formatters/json.py index a61af4db..27e029e8 100644 --- a/infrahub_sdk/ctl/formatters/json.py +++ b/infrahub_sdk/ctl/formatters/json.py @@ -18,7 +18,12 @@ class JsonFormatter: Uses stdlib json module with indentation for readable output. """ - def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> str: + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, # noqa: ARG002 + ) -> str: """Format a list of nodes as a JSON array. Each node is represented as a dict with attribute and @@ -27,6 +32,7 @@ def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> Args: nodes: List of InfrahubNode objects to format. schema: Schema definition for the node kind. + show_all_columns: Accepted for interface compatibility; not used for JSON. Returns: JSON array string. 
diff --git a/infrahub_sdk/ctl/formatters/table.py b/infrahub_sdk/ctl/formatters/table.py index 9e76de15..f127db74 100644 --- a/infrahub_sdk/ctl/formatters/table.py +++ b/infrahub_sdk/ctl/formatters/table.py @@ -8,7 +8,7 @@ from rich.console import Console from rich.table import Table -from .base import extract_node_data, extract_node_detail +from .base import extract_node_data, extract_node_detail, non_empty_columns if TYPE_CHECKING: from ...node import InfrahubNode @@ -22,7 +22,12 @@ class TableFormatter: column headers derived from the schema field names. """ - def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> str: + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, + ) -> str: """Format a list of nodes as a Rich table. Creates a table with one column per attribute and relationship, @@ -31,18 +36,20 @@ def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> Args: nodes: List of InfrahubNode objects to format. schema: Schema definition for the node kind. + show_all_columns: When True, include columns where every value is empty. Returns: Rendered table string. 
""" - columns = schema.attribute_names + schema.relationship_names - table = Table(title=schema.kind, show_lines=False) + all_columns = schema.attribute_names + schema.relationship_names + rows = [extract_node_data(node, schema) for node in nodes] + columns = all_columns if show_all_columns else non_empty_columns(rows, all_columns) + table = Table(title=schema.kind, show_lines=False) for col in columns: table.add_column(col, overflow="fold") - for node in nodes: - row_data = extract_node_data(node, schema) + for row_data in rows: table.add_row(*(str(row_data.get(col, "")) for col in columns)) return self._render(table) diff --git a/infrahub_sdk/ctl/formatters/yaml.py b/infrahub_sdk/ctl/formatters/yaml.py index 499ef757..5040e64b 100644 --- a/infrahub_sdk/ctl/formatters/yaml.py +++ b/infrahub_sdk/ctl/formatters/yaml.py @@ -31,7 +31,12 @@ class YamlFormatter: field2: value2 """ - def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> str: + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, # noqa: ARG002 + ) -> str: """Format a list of nodes as an Infrahub YAML object document. Each node becomes an entry in the spec.data array with its @@ -40,6 +45,7 @@ def format_list(self, nodes: list[InfrahubNode], schema: MainSchemaTypesAPI) -> Args: nodes: List of InfrahubNode objects to format. schema: Schema definition for the node kind. + show_all_columns: Accepted for interface compatibility; not used for YAML. Returns: YAML string in Infrahub object format. 
diff --git a/tests/unit/ctl/commands/test_get.py b/tests/unit/ctl/commands/test_get.py index 17c43e33..071fac02 100644 --- a/tests/unit/ctl/commands/test_get.py +++ b/tests/unit/ctl/commands/test_get.py @@ -47,7 +47,7 @@ def test_get_list_mode() -> None: assert result.exit_code == 0 mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) mock_client.filters.assert_awaited_once() - mock_formatter.format_list.assert_called_once_with([mock_node], mock_schema) + mock_formatter.format_list.assert_called_once_with([mock_node], mock_schema, show_all_columns=False) def test_get_detail_mode() -> None: diff --git a/tests/unit/ctl/formatters/test_csv.py b/tests/unit/ctl/formatters/test_csv.py index ae1960f6..42cd194d 100644 --- a/tests/unit/ctl/formatters/test_csv.py +++ b/tests/unit/ctl/formatters/test_csv.py @@ -158,7 +158,8 @@ def test_format_list_empty_nodes_returns_header_only(self) -> None: rows = _parse_csv(result) assert rows == [] - assert "name" in result + # With no data, empty columns are hidden by default + assert not result.strip() class TestCsvFormatterFormatDetail: From 36332e3eaa309649d34a899ae64efa38e74be27d Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 19:58:54 +0000 Subject: [PATCH 10/32] feat: improve empty results UX for infrahubctl get - Exit code 80 when query succeeds but returns zero results (exit 0 when results found, exit 1 on errors) - Print "No objects of kind {kind} found." to stderr on empty results - Show "{count} object(s) found."
footer in table output - Empty table still renders column headers so the user sees the kind name and knows the command ran --- infrahub_sdk/ctl/commands/get.py | 41 +++++++++++++++++++-------- infrahub_sdk/ctl/formatters/csv.py | 2 +- infrahub_sdk/ctl/formatters/table.py | 3 +- tests/unit/ctl/commands/test_get.py | 3 +- tests/unit/ctl/formatters/test_csv.py | 4 +-- 5 files changed, 36 insertions(+), 17 deletions(-) diff --git a/infrahub_sdk/ctl/commands/get.py b/infrahub_sdk/ctl/commands/get.py index 5f432309..71e338dc 100644 --- a/infrahub_sdk/ctl/commands/get.py +++ b/infrahub_sdk/ctl/commands/get.py @@ -10,8 +10,8 @@ from typing import Any -import typer # pyright: ignore[reportMissingImports] -from rich.console import Console # pyright: ignore[reportMissingImports] +import typer +from rich.console import Console from infrahub_sdk.ctl.client import initialize_client from infrahub_sdk.ctl.formatters import OutputFormat, detect_output_format, get_formatter @@ -19,7 +19,10 @@ from infrahub_sdk.ctl.parsers import parse_filter_args from infrahub_sdk.ctl.utils import catch_exception +EXIT_CODE_NO_RESULTS = 80 + console = Console() +console_stderr = Console(stderr=True) @catch_exception(console=console) @@ -41,6 +44,8 @@ async def get_command( By default, columns where every value is empty are hidden in table and CSV output. Use --all-columns to show them. + + Exit codes: 0 = results found, 80 = query succeeded but no results. 
""" client = initialize_client(branch=branch) schema = await client.schema.get(kind=kind, branch=branch) @@ -51,18 +56,30 @@ async def get_command( if identifier is not None: node = await client.get(kind=kind, id=identifier) result = formatter.format_detail(node, schema) - else: - filters: dict[str, Any] = parse_filter_args(filter_args or []) - nodes = await client.filters( - kind=kind, - **filters, - offset=offset, - limit=limit, - prefetch_relationships=True, - ) - result = formatter.format_list(nodes, schema, show_all_columns=all_columns) + if fmt == OutputFormat.TABLE: + console.print(result, highlight=False) + else: + typer.echo(result) + return + + filters: dict[str, Any] = parse_filter_args(filter_args or []) + nodes = await client.filters( + kind=kind, + **filters, + offset=offset, + limit=limit, + prefetch_relationships=True, + ) + + count = len(nodes) + result = formatter.format_list(nodes, schema, show_all_columns=all_columns) if fmt == OutputFormat.TABLE: console.print(result, highlight=False) + console.print(f"\n{count} object(s) found.", style="dim") else: typer.echo(result) + + if count == 0: + console_stderr.print(f"No objects of kind {kind} found.", style="yellow") + raise typer.Exit(code=EXIT_CODE_NO_RESULTS) diff --git a/infrahub_sdk/ctl/formatters/csv.py b/infrahub_sdk/ctl/formatters/csv.py index b266a7a7..9f437847 100644 --- a/infrahub_sdk/ctl/formatters/csv.py +++ b/infrahub_sdk/ctl/formatters/csv.py @@ -40,7 +40,7 @@ def format_list( """ all_columns = schema.attribute_names + schema.relationship_names rows = [extract_node_data(node, schema) for node in nodes] - columns = all_columns if show_all_columns else non_empty_columns(rows, all_columns) + columns = all_columns if not rows or show_all_columns else non_empty_columns(rows, all_columns) output = io.StringIO() writer = csv.writer(output) diff --git a/infrahub_sdk/ctl/formatters/table.py b/infrahub_sdk/ctl/formatters/table.py index f127db74..b90854f1 100644 --- 
a/infrahub_sdk/ctl/formatters/table.py +++ b/infrahub_sdk/ctl/formatters/table.py @@ -43,7 +43,8 @@ def format_list( """ all_columns = schema.attribute_names + schema.relationship_names rows = [extract_node_data(node, schema) for node in nodes] - columns = all_columns if show_all_columns else non_empty_columns(rows, all_columns) + + columns = all_columns if not rows or show_all_columns else non_empty_columns(rows, all_columns) table = Table(title=schema.kind, show_lines=False) for col in columns: diff --git a/tests/unit/ctl/commands/test_get.py b/tests/unit/ctl/commands/test_get.py index 071fac02..fab743e4 100644 --- a/tests/unit/ctl/commands/test_get.py +++ b/tests/unit/ctl/commands/test_get.py @@ -110,4 +110,5 @@ def test_get_list_mode_with_options(extra_args: list[str]) -> None: ): result = runner.invoke(app, ["get", "InfraDevice", *extra_args]) - assert result.exit_code == 0 + # Exit 80 = query succeeded but no results (empty mock) + assert result.exit_code == 80 diff --git a/tests/unit/ctl/formatters/test_csv.py b/tests/unit/ctl/formatters/test_csv.py index 42cd194d..e7969ab5 100644 --- a/tests/unit/ctl/formatters/test_csv.py +++ b/tests/unit/ctl/formatters/test_csv.py @@ -158,8 +158,8 @@ def test_format_list_empty_nodes_returns_header_only(self) -> None: rows = _parse_csv(result) assert rows == [] - # With no data, empty columns are hidden by default - assert not result.strip() + # With no data rows, all column headers are still shown + assert "name" in result class TestCsvFormatterFormatDetail: From 7b7fb9fe8e969a1ae8bf2abec7eccd6686f1324d Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 20:10:57 +0000 Subject: [PATCH 11/32] feat: resolve nodes by UUID, default filter, or HFID Add resolve_node() helper that tries multiple lookup strategies: 1. UUID if identifier looks like one 2. Schema default_filter (e.g., name__value) as keyword filter 3. 
Human-friendly ID (single or multi-component with / separator) This allows commands like `infrahubctl get DcimDevice arista-switch-01` to work using the device name, not just UUID. Applied to get, update, and delete commands. --- infrahub_sdk/ctl/commands/delete.py | 3 +- infrahub_sdk/ctl/commands/get.py | 3 +- infrahub_sdk/ctl/commands/update.py | 3 +- infrahub_sdk/ctl/commands/utils.py | 78 +++++++++++ tests/unit/ctl/commands/test_delete.py | 37 ++++-- tests/unit/ctl/commands/test_get.py | 10 +- tests/unit/ctl/commands/test_update.py | 47 +++++-- tests/unit/ctl/commands/test_utils.py | 172 +++++++++++++++++++++++++ 8 files changed, 323 insertions(+), 30 deletions(-) create mode 100644 infrahub_sdk/ctl/commands/utils.py create mode 100644 tests/unit/ctl/commands/test_utils.py diff --git a/infrahub_sdk/ctl/commands/delete.py b/infrahub_sdk/ctl/commands/delete.py index 224d0b94..37345365 100644 --- a/infrahub_sdk/ctl/commands/delete.py +++ b/infrahub_sdk/ctl/commands/delete.py @@ -9,6 +9,7 @@ from rich.console import Console from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.commands.utils import resolve_node from infrahub_sdk.ctl.parameters import CONFIG_PARAM from infrahub_sdk.ctl.utils import catch_exception @@ -29,7 +30,7 @@ async def delete_command( Unless --yes is provided, a confirmation prompt is shown first. 
""" client = initialize_client(branch=branch) - node = await client.get(kind=kind, id=identifier) + node = await resolve_node(client, kind, identifier, branch=branch) if not yes: typer.confirm(f"Delete {kind} '{node.display_label}'?", abort=True) diff --git a/infrahub_sdk/ctl/commands/get.py b/infrahub_sdk/ctl/commands/get.py index 71e338dc..fc1c7854 100644 --- a/infrahub_sdk/ctl/commands/get.py +++ b/infrahub_sdk/ctl/commands/get.py @@ -14,6 +14,7 @@ from rich.console import Console from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.commands.utils import resolve_node from infrahub_sdk.ctl.formatters import OutputFormat, detect_output_format, get_formatter from infrahub_sdk.ctl.parameters import CONFIG_PARAM from infrahub_sdk.ctl.parsers import parse_filter_args @@ -54,7 +55,7 @@ async def get_command( formatter = get_formatter(fmt) if identifier is not None: - node = await client.get(kind=kind, id=identifier) + node = await resolve_node(client, kind, identifier, schema=schema, branch=branch) result = formatter.format_detail(node, schema) if fmt == OutputFormat.TABLE: console.print(result, highlight=False) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index 38e730c3..641ae076 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -13,6 +13,7 @@ from rich.console import Console # pyright: ignore[reportMissingImports] from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.commands.utils import resolve_node from infrahub_sdk.ctl.parameters import CONFIG_PARAM from infrahub_sdk.ctl.parsers import parse_set_args, validate_set_fields from infrahub_sdk.ctl.utils import catch_exception @@ -86,7 +87,7 @@ async def _update_with_set_args( schema = await client.schema.get(kind=kind, branch=branch) validate_set_fields(data, schema.attribute_names, schema.relationship_names) - node = await client.get(kind=kind, id=identifier) + node = await 
resolve_node(client, kind, identifier, schema=schema, branch=branch) changes: list[tuple[str, object, str]] = [] for key, new_value in data.items(): diff --git a/infrahub_sdk/ctl/commands/utils.py b/infrahub_sdk/ctl/commands/utils.py new file mode 100644 index 00000000..9a272bc3 --- /dev/null +++ b/infrahub_sdk/ctl/commands/utils.py @@ -0,0 +1,78 @@ +"""Shared utilities for end-user CLI commands.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from infrahub_sdk.schema import NodeSchemaAPI +from infrahub_sdk.utils import is_valid_uuid + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + from infrahub_sdk.node import InfrahubNode + from infrahub_sdk.schema import MainSchemaTypesAPI + + +async def resolve_node( + client: InfrahubClient, + kind: str, + identifier: str, + schema: MainSchemaTypesAPI | None = None, + branch: str | None = None, +) -> InfrahubNode: + """Resolve a node by identifier, trying multiple lookup strategies. + + Lookup order: + 1. UUID — if the identifier looks like a valid UUID. + 2. Default filter — if the schema defines a ``default_filter`` + (e.g., ``name__value``), use it as a keyword filter. + 3. HFID — if the schema defines a ``human_friendly_id``, treat + the identifier as HFID components (split on ``/`` for + multi-component HFIDs, or as a single component). + + Args: + client: Initialised async Infrahub client. + kind: Infrahub schema kind. + identifier: UUID, display name, or HFID string. + schema: Pre-fetched schema (fetched if not provided). + branch: Optional target branch. + + Returns: + The resolved InfrahubNode. + + Raises: + NodeNotFoundError: If no lookup strategy finds the node. + """ + if schema is None: + schema = await client.schema.get(kind=kind, branch=branch) + + # 1. UUID + if is_valid_uuid(identifier): + return await client.get(kind=kind, id=identifier, branch=branch) + + # 2. 
Default filter + if isinstance(schema, NodeSchemaAPI) and schema.default_filter: + filters: dict[str, Any] = {schema.default_filter: identifier} + node = await client.get( + kind=kind, + branch=branch, + raise_when_missing=False, + **filters, + ) + if node is not None: + return node + + # 3. HFID (single or multi-component separated by /) + if isinstance(schema, NodeSchemaAPI) and schema.human_friendly_id: + hfid_parts = identifier.split("/") if "/" in identifier else [identifier] + node = await client.get( + kind=kind, + hfid=hfid_parts, + branch=branch, + raise_when_missing=False, + ) + if node is not None: + return node + + # Nothing found — raise with a helpful error via the standard path + return await client.get(kind=kind, id=identifier, branch=branch) diff --git a/tests/unit/ctl/commands/test_delete.py b/tests/unit/ctl/commands/test_delete.py index 6bc9caaa..b2b53748 100644 --- a/tests/unit/ctl/commands/test_delete.py +++ b/tests/unit/ctl/commands/test_delete.py @@ -26,14 +26,15 @@ def test_delete_with_yes() -> None: mock_node.delete = AsyncMock() mock_client = MagicMock() - mock_client.get = AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): result = runner.invoke(app, ["delete", "InfraDevice", "node-del-001", "--yes"]) assert result.exit_code == 0, result.stdout assert "Deleted" in result.stdout - mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="node-del-001") mock_node.delete.assert_awaited_once() @@ -45,9 +46,11 @@ def test_delete_with_yes_short_flag() -> None: mock_node.delete = AsyncMock() mock_client = MagicMock() - mock_client.get = AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.delete.initialize_client", 
return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): result = runner.invoke(app, ["delete", "InfraDevice", "node-del-002", "-y"]) assert result.exit_code == 0, result.stdout @@ -62,9 +65,11 @@ def test_delete_with_branch() -> None: mock_node.delete = AsyncMock() mock_client = MagicMock() - mock_client.get = AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client) as mock_init: + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client) as mock_init, + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): result = runner.invoke( app, ["delete", "InfraDevice", "node-br-del", "--yes", "--branch", "my-branch"], @@ -83,9 +88,11 @@ def test_delete_confirmation_abort() -> None: mock_node.delete = AsyncMock() mock_client = MagicMock() - mock_client.get = AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): result = runner.invoke(app, ["delete", "InfraDevice", "node-abort"], input="n\n") assert result.exit_code != 0 @@ -100,9 +107,11 @@ def test_delete_confirmation_yes_input() -> None: mock_node.delete = AsyncMock() mock_client = MagicMock() - mock_client.get = AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", 
new_callable=AsyncMock, return_value=mock_node), + ): result = runner.invoke(app, ["delete", "InfraDevice", "node-confirm"], input="y\n") assert result.exit_code == 0, result.stdout @@ -118,9 +127,11 @@ def test_delete_output_contains_id_and_label() -> None: mock_node.delete = AsyncMock() mock_client = MagicMock() - mock_client.get = AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): result = runner.invoke(app, ["delete", "InfraDevice", "unique-id-xyz", "--yes"]) assert result.exit_code == 0, result.stdout diff --git a/tests/unit/ctl/commands/test_get.py b/tests/unit/ctl/commands/test_get.py index fab743e4..845ae3c4 100644 --- a/tests/unit/ctl/commands/test_get.py +++ b/tests/unit/ctl/commands/test_get.py @@ -51,7 +51,7 @@ def test_get_list_mode() -> None: def test_get_detail_mode() -> None: - """Detail mode calls ``client.get`` when an identifier is supplied.""" + """Detail mode calls ``resolve_node`` when an identifier is supplied.""" mock_schema = MagicMock() mock_schema.attribute_names = ["name"] mock_schema.relationship_names = [] @@ -63,7 +63,6 @@ def test_get_detail_mode() -> None: mock_client = MagicMock() mock_client.schema = MagicMock() mock_client.schema.get = AsyncMock(return_value=mock_schema) - mock_client.get = AsyncMock(return_value=mock_node) mock_formatter = MagicMock() mock_formatter.format_detail.return_value = '{"id": "abc-123"}' @@ -72,11 +71,16 @@ def test_get_detail_mode() -> None: patch("infrahub_sdk.ctl.commands.get.initialize_client", return_value=mock_client), patch("infrahub_sdk.ctl.commands.get.detect_output_format", return_value="json"), patch("infrahub_sdk.ctl.commands.get.get_formatter", return_value=mock_formatter), + patch( + 
"infrahub_sdk.ctl.commands.get.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ) as mock_resolve, ): result = runner.invoke(app, ["get", "InfraDevice", "abc-123"]) assert result.exit_code == 0 - mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="abc-123") + mock_resolve.assert_awaited_once_with(mock_client, "InfraDevice", "abc-123", schema=mock_schema, branch=None) mock_formatter.format_detail.assert_called_once_with(mock_node, mock_schema) diff --git a/tests/unit/ctl/commands/test_update.py b/tests/unit/ctl/commands/test_update.py index 549179e3..8825b8a4 100644 --- a/tests/unit/ctl/commands/test_update.py +++ b/tests/unit/ctl/commands/test_update.py @@ -57,18 +57,22 @@ def getattr_side_effect(obj: object, name: str) -> MagicMock: mock_client = MagicMock() mock_client.schema = MagicMock() mock_client.schema.get = AsyncMock(return_value=mock_schema) - mock_client.get = AsyncMock(return_value=mock_node) with ( patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), patch("infrahub_sdk.ctl.commands.update.getattr", side_effect=getattr_side_effect, create=True), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ) as mock_resolve, ): result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--set", "name=router1"]) assert result.exit_code == 0, result.stdout assert "Updated" in result.stdout mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) - mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="abc-123") + mock_resolve.assert_awaited_once_with(mock_client, "InfraDevice", "abc-123", schema=mock_schema, branch=None) mock_node.save.assert_awaited_once() @@ -92,9 +96,15 @@ def test_update_with_set_args_attribute_applied() -> None: mock_client = MagicMock() mock_client.schema = MagicMock() mock_client.schema.get = AsyncMock(return_value=mock_schema) - mock_client.get = 
AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ), + ): result = runner.invoke(app, ["update", "InfraDevice", "node-001", "--set", "description=new description"]) assert result.exit_code == 0, result.stdout @@ -103,7 +113,7 @@ def test_update_with_set_args_attribute_applied() -> None: def test_update_with_set_args_and_branch() -> None: - """``update`` forwards --branch to schema and get calls.""" + """``update`` forwards --branch to schema and resolve_node calls.""" mock_schema = MagicMock() mock_schema.attribute_names = ["name"] mock_schema.relationship_names = [] @@ -119,9 +129,15 @@ def test_update_with_set_args_and_branch() -> None: mock_client = MagicMock() mock_client.schema = MagicMock() mock_client.schema.get = AsyncMock(return_value=mock_schema) - mock_client.get = AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ) as mock_resolve, + ): result = runner.invoke( app, ["update", "InfraDevice", "node-br", "--set", "name=newname", "--branch", "feature-x"], @@ -129,7 +145,7 @@ def test_update_with_set_args_and_branch() -> None: assert result.exit_code == 0, result.stdout mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch="feature-x") - mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="node-br") + mock_resolve.assert_awaited_once_with(mock_client, "InfraDevice", "node-br", schema=mock_schema, branch="feature-x") def test_update_invalid_field() -> None: 
@@ -142,7 +158,10 @@ def test_update_invalid_field() -> None: mock_client.schema = MagicMock() mock_client.schema.get = AsyncMock(return_value=mock_schema) - with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.update.resolve_node", new_callable=AsyncMock), + ): result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--set", "unknown_field=value"]) assert result.exit_code != 0 @@ -216,9 +235,15 @@ def test_update_with_set_args_relationship() -> None: mock_client = MagicMock() mock_client.schema = MagicMock() mock_client.schema.get = AsyncMock(return_value=mock_schema) - mock_client.get = AsyncMock(return_value=mock_node) - with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ), + ): result = runner.invoke(app, ["update", "InfraDevice", "node-rel-001", "--set", "site=new-site-id"]) assert result.exit_code == 0, result.stdout diff --git a/tests/unit/ctl/commands/test_utils.py b/tests/unit/ctl/commands/test_utils.py new file mode 100644 index 00000000..57f370db --- /dev/null +++ b/tests/unit/ctl/commands/test_utils.py @@ -0,0 +1,172 @@ +"""Unit tests for ``infrahub_sdk.ctl.commands.utils``.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from infrahub_sdk.ctl.commands.utils import resolve_node +from infrahub_sdk.exceptions import NodeNotFoundError +from infrahub_sdk.schema import NodeSchemaAPI + + +@pytest.fixture +def mock_client() -> MagicMock: + """Return a mock InfrahubClient with async schema and get methods.""" + client = MagicMock() + client.schema = MagicMock() + 
client.schema.get = AsyncMock() + client.get = AsyncMock() + return client + + +@pytest.mark.anyio +async def test_resolve_by_uuid(mock_client: MagicMock) -> None: + """When the identifier is a valid UUID, ``client.get(id=...)`` is called directly.""" + mock_schema = MagicMock() + mock_schema.default_filter = None + mock_schema.human_friendly_id = None + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + uuid_identifier = "12345678-1234-5678-1234-567812345678" + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=True): + result = await resolve_node(mock_client, "InfraDevice", uuid_identifier) + + assert result is expected_node + mock_client.get.assert_awaited_once_with(kind="InfraDevice", id=uuid_identifier, branch=None) + + +@pytest.mark.anyio +async def test_resolve_by_default_filter(mock_client: MagicMock) -> None: + """When the schema has a ``default_filter``, it is used as a keyword filter.""" + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = "name__value" + mock_schema.human_friendly_id = None + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False): + result = await resolve_node(mock_client, "InfraDevice", "router1") + + assert result is expected_node + mock_client.get.assert_awaited_once_with( + kind="InfraDevice", + branch=None, + raise_when_missing=False, + name__value="router1", + ) + + +@pytest.mark.anyio +async def test_resolve_by_hfid(mock_client: MagicMock) -> None: + """When the schema defines ``human_friendly_id``, ``client.get(hfid=...)`` is used.""" + + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = None + mock_schema.human_friendly_id = ["name__value"] + mock_client.schema.get = 
AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False): + result = await resolve_node(mock_client, "InfraDevice", "router1") + + assert result is expected_node + mock_client.get.assert_awaited_once_with( + kind="InfraDevice", + hfid=["router1"], + branch=None, + raise_when_missing=False, + ) + + +@pytest.mark.anyio +async def test_resolve_by_hfid_multi_component(mock_client: MagicMock) -> None: + """Multi-component HFID strings (``a/b``) are split on ``/``.""" + + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = None + mock_schema.human_friendly_id = ["site__name__value", "name__value"] + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False): + result = await resolve_node(mock_client, "InfraDevice", "london/router1") + + assert result is expected_node + mock_client.get.assert_awaited_once_with( + kind="InfraDevice", + hfid=["london", "router1"], + branch=None, + raise_when_missing=False, + ) + + +@pytest.mark.anyio +async def test_resolve_fallback_raises(mock_client: MagicMock) -> None: + """When no lookup strategy matches, the fallback ``client.get(id=...)`` call raises.""" + + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = None + mock_schema.human_friendly_id = None + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + mock_client.get = AsyncMock( + side_effect=NodeNotFoundError(identifier={"id": ["unknown-name"]}, node_type="InfraDevice") + ) + + with ( + patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False), + pytest.raises(NodeNotFoundError), + ): + await resolve_node(mock_client, "InfraDevice", "unknown-name") + + 
mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="unknown-name", branch=None) + + +@pytest.mark.anyio +async def test_resolve_uses_provided_schema(mock_client: MagicMock) -> None: + """When ``schema`` is provided, ``client.schema.get`` is not called.""" + pre_fetched_schema = MagicMock(spec=NodeSchemaAPI) + pre_fetched_schema.default_filter = None + pre_fetched_schema.human_friendly_id = None + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + uuid_identifier = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=True): + result = await resolve_node(mock_client, "InfraDevice", uuid_identifier, schema=pre_fetched_schema) + + assert result is expected_node + mock_client.schema.get.assert_not_awaited() + + +@pytest.mark.anyio +async def test_resolve_default_filter_miss_falls_through_to_hfid(mock_client: MagicMock) -> None: + """When the default-filter lookup returns ``None``, the HFID strategy is tried next.""" + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = "name__value" + mock_schema.human_friendly_id = ["name__value"] + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + # First call (default_filter) returns None; second call (hfid) returns the node. 
+ mock_client.get = AsyncMock(side_effect=[None, expected_node]) + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False): + result = await resolve_node(mock_client, "InfraDevice", "router1") + + assert result is expected_node + assert mock_client.get.await_count == 2 From c9119eaadbedfb8806cdd528b7ef084174c961f4 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 20:24:46 +0000 Subject: [PATCH 12/32] fix: make YAML output round-trippable with infrahubctl object load - Omit empty/null attribute values instead of writing empty strings (fixes BigInt validation errors on reload) - Omit unset relationships instead of writing empty strings or {data: []} - Use HFID for relationship references: single-component as string, multi-component as list (fixes HFID element count mismatch errors) - Preserve falsy-but-valid values (0, False) --- infrahub_sdk/ctl/formatters/yaml.py | 116 +++++++-------- tests/unit/ctl/formatters/test_yaml.py | 186 ++++++++++++------------- 2 files changed, 147 insertions(+), 155 deletions(-) diff --git a/infrahub_sdk/ctl/formatters/yaml.py b/infrahub_sdk/ctl/formatters/yaml.py index 5040e64b..9100fd7c 100644 --- a/infrahub_sdk/ctl/formatters/yaml.py +++ b/infrahub_sdk/ctl/formatters/yaml.py @@ -1,12 +1,15 @@ -"""YAML formatter for InfrahubNode query results in Infrahub object format.""" +"""YAML formatter for InfrahubNode query results in Infrahub object format. + +Produces YAML that is round-trippable with ``infrahubctl object load``. +Empty/null attribute values and unset relationships are omitted so the +output can be loaded back without validation errors. 
+""" from __future__ import annotations from typing import TYPE_CHECKING, Any -import yaml - -from .base import extract_node_detail +import yaml # type: ignore[import-untyped] if TYPE_CHECKING: from ...node import InfrahubNode @@ -37,34 +40,12 @@ def format_list( schema: MainSchemaTypesAPI, show_all_columns: bool = False, # noqa: ARG002 ) -> str: - """Format a list of nodes as an Infrahub YAML object document. - - Each node becomes an entry in the spec.data array with its - attribute and relationship values. - - Args: - nodes: List of InfrahubNode objects to format. - schema: Schema definition for the node kind. - show_all_columns: Accepted for interface compatibility; not used for YAML. - - Returns: - YAML string in Infrahub object format. - """ + """Format a list of nodes as an Infrahub YAML object document.""" data_items = [self._node_to_data_entry(node, schema) for node in nodes] return self._build_document(schema.kind, data_items) def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: - """Format a single node as an Infrahub YAML object document. - - The spec.data array contains a single entry for the node. - - Args: - node: The InfrahubNode to format. - schema: Schema definition for the node kind. - - Returns: - YAML string in Infrahub object format. - """ + """Format a single node as an Infrahub YAML object document.""" data_entry = self._node_to_data_entry(node, schema) return self._build_document(schema.kind, [data_entry]) @@ -73,55 +54,46 @@ def _node_to_data_entry( node: InfrahubNode, schema: MainSchemaTypesAPI, ) -> dict[str, Any]: - """Convert a single node into a data entry dict for YAML output. - - Args: - node: The InfrahubNode to convert. - schema: Schema definition for the node kind. + """Convert a node into a dict compatible with ObjectFile spec format. - Returns: - Dict suitable for inclusion in the spec.data array. 
+ Omits empty/null attribute values and unset relationships so the + output can be loaded back via ``infrahubctl object load`` without + validation errors. """ - detail = extract_node_detail(node, schema) entry: dict[str, Any] = {} - # Attributes: extract plain values + # Attributes: only include non-empty values for attr_name in schema.attribute_names: - attr_detail = detail.get(attr_name, {}) - if isinstance(attr_detail, dict): - entry[attr_name] = attr_detail.get("value", "") - else: - entry[attr_name] = attr_detail + attr = getattr(node, attr_name, None) + if attr is None: + continue + value = attr.value + if not value and value != 0 and value is not False: + continue + entry[attr_name] = value - # Relationships: format depends on cardinality + # Relationships: skip unset, use HFID when available for rel_name in schema.relationship_names: - rel_detail = detail.get(rel_name, {}) - if not isinstance(rel_detail, dict): - entry[rel_name] = rel_detail + rel_schema = schema.get_relationship(rel_name) + rel = getattr(node, rel_name, None) + if rel is None: continue - if rel_detail.get("cardinality") == "one": - entry[rel_name] = rel_detail.get("display_label", "") + if rel_schema.cardinality == "one": + ref = _related_node_ref(rel) + if ref is not None: + entry[rel_name] = ref else: - peers = rel_detail.get("peers", []) - if peers: - entry[rel_name] = {"data": [p.get("display_label", "") for p in peers]} - else: - entry[rel_name] = {"data": []} + peers = getattr(rel, "peers", None) or [] + refs = [r for p in peers if (r := _related_node_ref(p)) is not None] + if refs: + entry[rel_name] = {"data": refs} return entry @staticmethod def _build_document(kind: str, data: list[dict[str, Any]]) -> str: - """Build the full Infrahub YAML document structure. - - Args: - kind: The schema kind string (e.g. "InfraDevice"). - data: List of data entry dicts for the spec.data array. - - Returns: - Complete YAML document string with leading '---' separator. 
- """ + """Build the full Infrahub YAML document structure.""" document = { "apiVersion": _INFRAHUB_API_VERSION, "kind": _INFRAHUB_KIND, @@ -136,3 +108,23 @@ def _build_document(kind: str, data: list[dict[str, Any]]) -> str: sort_keys=False, allow_unicode=True, ) + + +def _related_node_ref(rel: Any) -> str | list[str] | None: + """Build a reference value for a related node suitable for ObjectFile. + + Uses the HFID if available. For single-component HFIDs, returns a + plain string. For multi-component HFIDs, returns a list. Falls back + to display_label. + + Args: + rel: A RelatedNode object. + + Returns: + A string, list of strings, or None if the relationship is unset. + """ + hfid = getattr(rel, "hfid", None) + if hfid: + return hfid[0] if len(hfid) == 1 else list(hfid) + label = getattr(rel, "display_label", None) + return label or None diff --git a/tests/unit/ctl/formatters/test_yaml.py b/tests/unit/ctl/formatters/test_yaml.py index 25a7525b..e0a66268 100644 --- a/tests/unit/ctl/formatters/test_yaml.py +++ b/tests/unit/ctl/formatters/test_yaml.py @@ -2,7 +2,7 @@ from __future__ import annotations -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock import yaml # pyright: ignore[reportMissingModuleSource] @@ -46,6 +46,7 @@ def _make_mock_node( Args: attr_values: Mapping of attribute name to value. rel_values: Mapping of relationship name to display_label string. + The display_label is also used as a single-component HFID. node_id: The node ID. display_label: The display label for the node. 
@@ -63,6 +64,7 @@ def _make_mock_node( rel = MagicMock() rel.display_label = label rel.id = f"{rel_name}-id" + rel.hfid = [label] if label else None setattr(node, rel_name, rel) return node @@ -207,136 +209,134 @@ def test_format_detail_relationship_uses_display_label(self) -> None: class TestYamlFormatterEdgeCases: - """Edge case tests targeting uncovered branches in YamlFormatter._node_to_data_entry.""" + """Edge case tests for YamlFormatter._node_to_data_entry.""" - def test_attr_detail_not_dict_uses_raw_value(self) -> None: - """Test that a non-dict attr_detail is used as the raw entry value. - - Covers the ``else`` branch in _node_to_data_entry for attributes when - detail.get(attr_name) returns something that is not a dict. - """ - schema = _make_mock_schema(["name"], []) - node = _make_mock_node({"name": "router1"}, {}) + def test_null_attribute_omitted(self) -> None: + """Attributes with None values are omitted from the output.""" + schema = _make_mock_schema(["name", "desc"], []) + node = MagicMock() + name_attr = MagicMock() + name_attr.value = "router1" + node.name = name_attr + desc_attr = MagicMock() + desc_attr.value = None + node.desc = desc_attr formatter = YamlFormatter() - fake_detail = { - "id": "test-id", - "display_label": "Test", - "kind": "TestKind", - "name": "raw-string-value", # not a dict - } - with patch( - "infrahub_sdk.ctl.formatters.yaml.extract_node_detail", - return_value=fake_detail, - ): - result = formatter.format_detail(node, schema) - + result = formatter.format_detail(node, schema) parsed = yaml.safe_load(result) - assert parsed["spec"]["data"][0]["name"] == "raw-string-value" + entry = parsed["spec"]["data"][0] + assert entry["name"] == "router1" + assert "desc" not in entry - def test_rel_detail_not_dict_uses_raw_value(self) -> None: - """Test that a non-dict rel_detail is used as the raw entry value. - - Covers the ``not isinstance(rel_detail, dict)`` branch for relationships. 
- """ - schema = _make_mock_schema([], ["site"]) - node = _make_mock_node({}, {"site": "DC1"}) + def test_empty_string_attribute_omitted(self) -> None: + """Attributes with empty string values are omitted.""" + schema = _make_mock_schema(["name", "desc"], []) + node = MagicMock() + name_attr = MagicMock() + name_attr.value = "router1" + node.name = name_attr + desc_attr = MagicMock() + desc_attr.value = "" + node.desc = desc_attr formatter = YamlFormatter() - fake_detail = { - "id": "test-id", - "display_label": "Test", - "kind": "TestKind", - "site": "non-dict-rel-value", # not a dict - } - with patch( - "infrahub_sdk.ctl.formatters.yaml.extract_node_detail", - return_value=fake_detail, - ): - result = formatter.format_detail(node, schema) + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert "desc" not in parsed["spec"]["data"][0] + def test_zero_attribute_preserved(self) -> None: + """Numeric zero is a valid value and must not be omitted.""" + schema = _make_mock_schema(["count"], []) + node = MagicMock() + attr = MagicMock() + attr.value = 0 + node.count = attr + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) parsed = yaml.safe_load(result) - assert parsed["spec"]["data"][0]["site"] == "non-dict-rel-value" + assert parsed["spec"]["data"][0]["count"] == 0 - def test_rel_cardinality_one_with_empty_display_label(self) -> None: - """Test cardinality-one relationship with an empty display_label. + def test_false_attribute_preserved(self) -> None: + """Boolean False is a valid value and must not be omitted.""" + schema = _make_mock_schema(["enabled"], []) + node = MagicMock() + attr = MagicMock() + attr.value = False + node.enabled = attr + formatter = YamlFormatter() - Covers the ``cardinality == "one"`` branch where display_label is "". 
- """ + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["enabled"] is False + + def test_rel_cardinality_one_unset_omitted(self) -> None: + """Cardinality-one relationship with no display_label or hfid is omitted.""" schema = _make_mock_schema([], ["site"]) - node = _make_mock_node({}, {}) - # Attach a relationship with empty display_label using configure_mock - # to avoid setattr with a constant string literal. + node = MagicMock() rel = MagicMock() - rel.display_label = "" - rel.id = "site-id" - node.configure_mock(site=rel) + rel.display_label = None + rel.hfid = None + node.site = rel formatter = YamlFormatter() result = formatter.format_detail(node, schema) - parsed = yaml.safe_load(result) - assert not parsed["spec"]["data"][0]["site"] + assert "site" not in parsed["spec"]["data"][0] - def test_rel_cardinality_many_with_empty_peers(self) -> None: - """Test cardinality-many relationship with an empty peers list. - - Covers the ``peers`` empty branch producing ``{"data": []}``. 
- """ + def test_rel_cardinality_many_empty_peers_omitted(self) -> None: + """Cardinality-many with no peers is omitted from output.""" schema = MagicMock() schema.kind = "TestKind" schema.attribute_names = [] schema.relationship_names = ["tags"] - - def get_rel_side_effect(name: str) -> MagicMock: - rel = MagicMock() - rel.cardinality = "many" - return rel - - schema.get_relationship = MagicMock(side_effect=get_rel_side_effect) + rel_schema = MagicMock() + rel_schema.cardinality = "many" + schema.get_relationship.return_value = rel_schema node = MagicMock() - node.id = "test-id" - node.display_label = "Test" rel_manager = MagicMock() rel_manager.peers = [] - node.configure_mock(tags=rel_manager) - + node.tags = rel_manager formatter = YamlFormatter() - result = formatter.format_detail(node, schema) + result = formatter.format_detail(node, schema) parsed = yaml.safe_load(result) - assert parsed["spec"]["data"][0]["tags"] == {"data": []} - - def test_rel_cardinality_many_with_peers(self) -> None: - """Test cardinality-many relationship with populated peers. + assert "tags" not in parsed["spec"]["data"][0] - Covers the ``peers`` non-empty branch producing ``{"data": [...]}``. 
- """ + def test_rel_cardinality_many_with_peers_uses_hfid(self) -> None: + """Cardinality-many peers use HFID when available.""" schema = MagicMock() schema.kind = "TestKind" schema.attribute_names = [] schema.relationship_names = ["tags"] - - def get_rel_side_effect(name: str) -> MagicMock: - rel = MagicMock() - rel.cardinality = "many" - return rel - - schema.get_relationship = MagicMock(side_effect=get_rel_side_effect) + rel_schema = MagicMock() + rel_schema.cardinality = "many" + schema.get_relationship.return_value = rel_schema node = MagicMock() - node.id = "test-id" - node.display_label = "Test" + peer1 = MagicMock(display_label="tag1", hfid=["tag1"]) + peer2 = MagicMock(display_label="tag2", hfid=["tag2"]) rel_manager = MagicMock() - rel_manager.peers = [ - MagicMock(display_label="peer1", id="id1"), - MagicMock(display_label="peer2", id="id2"), - ] - node.configure_mock(tags=rel_manager) - + rel_manager.peers = [peer1, peer2] + node.tags = rel_manager formatter = YamlFormatter() + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["tags"] == {"data": ["tag1", "tag2"]} + def test_rel_multi_component_hfid(self) -> None: + """Multi-component HFID renders as a list.""" + schema = _make_mock_schema([], ["platform"]) + node = MagicMock() + rel = MagicMock() + rel.display_label = "Cisco NX-OS" + rel.hfid = ["Cisco", "NX-OS"] + node.platform = rel + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) parsed = yaml.safe_load(result) - assert parsed["spec"]["data"][0]["tags"] == {"data": ["peer1", "peer2"]} + assert parsed["spec"]["data"][0]["platform"] == ["Cisco", "NX-OS"] From ee6b73f7f67174c600a844acdb54a372b4079113 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 20:36:26 +0000 Subject: [PATCH 13/32] fix: clean error output for missing --set/--file arguments Replace typer.BadParameter with console.print + typer.Exit for create/update validation errors. 
BadParameter was caught by the generic exception handler which printed a full traceback. Now shows a clean error message with usage example. --- infrahub_sdk/ctl/commands/create.py | 7 +++++-- infrahub_sdk/ctl/commands/update.py | 7 +++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/infrahub_sdk/ctl/commands/create.py b/infrahub_sdk/ctl/commands/create.py index 5c661902..ddb41cfd 100644 --- a/infrahub_sdk/ctl/commands/create.py +++ b/infrahub_sdk/ctl/commands/create.py @@ -34,9 +34,12 @@ async def create_command( JSON/YAML object file via --file. The two modes are mutually exclusive. """ if set_args and file: - raise typer.BadParameter("--set and --file are mutually exclusive. Use one or the other.") + console.print("[red]Error: --set and --file are mutually exclusive.") + raise typer.Exit(code=1) if not set_args and not file: - raise typer.BadParameter("Provide either --set key=value pairs or --file .") + console.print("[red]Error: provide --set key=value or --file .") + console.print("Example: infrahubctl create MyKind --set name=foo --set status=active") + raise typer.Exit(code=1) client = initialize_client(branch=branch) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index 641ae076..a5c2be56 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -41,10 +41,13 @@ async def update_command( Provide field values with repeatable --set flags or supply a file via --file. 
""" if set_args and file: - raise typer.BadParameter("--set and --file are mutually exclusive.") + console.print("[red]Error: --set and --file are mutually exclusive.") + raise typer.Exit(code=1) if not set_args and not file: - raise typer.BadParameter("Provide either --set or --file to specify update data.") + console.print("[red]Error: provide --set key=value or --file .") + console.print("Example: infrahubctl update MyKind my-node --set field=value") + raise typer.Exit(code=1) client = initialize_client(branch=branch) From 233ac0b1c06198ca1e11ea8b11db1074d0898972 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 20:46:08 +0000 Subject: [PATCH 14/32] feat: resolve relationship values by name in create/update commands When using --set location=LON-1, the CLI now resolves the relationship target by searching for the node across all schema kinds. This handles generic peer types (e.g., LocationHosting) where the concrete kind (LocationBuilding) differs from the relationship declaration. Lookup order for relationship values: 1. UUID if it looks like one 2. Direct lookup on the declared peer kind (default_filter, then HFID) 3. 
Search all node schemas by default_filter and HFID --- infrahub_sdk/ctl/commands/create.py | 2 + infrahub_sdk/ctl/commands/update.py | 12 +-- infrahub_sdk/ctl/commands/utils.py | 116 +++++++++++++++++++++++++ tests/unit/ctl/commands/test_update.py | 8 +- 4 files changed, 131 insertions(+), 7 deletions(-) diff --git a/infrahub_sdk/ctl/commands/create.py b/infrahub_sdk/ctl/commands/create.py index ddb41cfd..85495f1f 100644 --- a/infrahub_sdk/ctl/commands/create.py +++ b/infrahub_sdk/ctl/commands/create.py @@ -12,6 +12,7 @@ from rich.console import Console from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.commands.utils import resolve_relationship_values from infrahub_sdk.ctl.parameters import CONFIG_PARAM from infrahub_sdk.ctl.parsers import parse_set_args, validate_set_fields from infrahub_sdk.ctl.utils import catch_exception @@ -54,6 +55,7 @@ async def create_command( data = parse_set_args(set_args) # type: ignore[arg-type] schema = await client.schema.get(kind=kind, branch=branch) validate_set_fields(data, schema.attribute_names, schema.relationship_names) + data = await resolve_relationship_values(client, data, schema, branch=branch) node = await client.create(kind=kind, data=data, branch=branch) await node.save(allow_upsert=True) console.print(f"[green]Created {kind} '{node.display_label}' (id: {node.id})") diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index a5c2be56..196df925 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -13,7 +13,7 @@ from rich.console import Console # pyright: ignore[reportMissingImports] from infrahub_sdk.ctl.client import initialize_client -from infrahub_sdk.ctl.commands.utils import resolve_node +from infrahub_sdk.ctl.commands.utils import resolve_node, resolve_relationship_values from infrahub_sdk.ctl.parameters import CONFIG_PARAM from infrahub_sdk.ctl.parsers import parse_set_args, validate_set_fields from 
infrahub_sdk.ctl.utils import catch_exception @@ -92,7 +92,9 @@ async def _update_with_set_args( node = await resolve_node(client, kind, identifier, schema=schema, branch=branch) - changes: list[tuple[str, object, str]] = [] + resolved_data = await resolve_relationship_values(client, data, schema, branch=branch) + + changes: list[tuple[str, object, object]] = [] for key, new_value in data.items(): if key in schema.attribute_names: attr = getattr(node, key) @@ -101,10 +103,8 @@ async def _update_with_set_args( changes.append((key, old_value, new_value)) elif key in schema.relationship_names: rel = getattr(node, key) - old_id = getattr(rel, "id", None) - await rel.fetch() # type: ignore[union-attr] - old_display = getattr(rel, "display_label", old_id) - setattr(node, key, {"id": new_value}) + old_display = getattr(rel, "display_label", getattr(rel, "id", None)) + setattr(node, key, resolved_data[key]) changes.append((key, old_display, new_value)) await node.save() diff --git a/infrahub_sdk/ctl/commands/utils.py b/infrahub_sdk/ctl/commands/utils.py index 9a272bc3..c773a381 100644 --- a/infrahub_sdk/ctl/commands/utils.py +++ b/infrahub_sdk/ctl/commands/utils.py @@ -2,11 +2,14 @@ from __future__ import annotations +import logging from typing import TYPE_CHECKING, Any from infrahub_sdk.schema import NodeSchemaAPI from infrahub_sdk.utils import is_valid_uuid +logger = logging.getLogger(__name__) + if TYPE_CHECKING: from infrahub_sdk import InfrahubClient from infrahub_sdk.node import InfrahubNode @@ -76,3 +79,116 @@ async def resolve_node( # Nothing found — raise with a helpful error via the standard path return await client.get(kind=kind, id=identifier, branch=branch) + + +async def resolve_relationship_values( + client: InfrahubClient, + data: dict[str, Any], + schema: MainSchemaTypesAPI, + branch: str | None = None, +) -> dict[str, Any]: + """Resolve relationship string values in a data dict to node IDs. 
+
+    For each key that is a relationship name in the schema, attempts to
+    look up the target node by the string value (using the relationship's
+    peer kind). The value is replaced with ``{"id": "<node-id>"}`` so the
+    SDK can create/update the node correctly.
+
+    Attribute values are passed through unchanged.
+
+    Args:
+        client: Initialised async Infrahub client.
+        data: Parsed data from ``--set`` arguments.
+        schema: Schema for the kind being created/updated.
+        branch: Optional target branch.
+
+    Returns:
+        A new dict with relationship values resolved to ID references.
+    """
+    resolved: dict[str, Any] = {}
+
+    for key, value in data.items():
+        if key not in schema.relationship_names:
+            resolved[key] = value
+            continue
+
+        # Already a dict (e.g. {"id": "uuid"}) — pass through
+        if isinstance(value, dict):
+            resolved[key] = value
+            continue
+
+        str_value = str(value)
+        rel_schema = schema.get_relationship(key)
+        peer_kind = rel_schema.peer
+
+        # Try to resolve the string value as a node identifier
+        try:
+            peer_node = await resolve_node(client, peer_kind, str_value, branch=branch)
+            resolved[key] = {"id": peer_node.id}
+        except Exception:
+            # If the peer kind is a generic, try searching all schemas
+            # for a node matching the value
+            node = await _search_generic_peer(client, str_value, branch=branch)
+            if node is not None:
+                resolved[key] = {"id": node.id}
+            else:
+                resolved[key] = value
+
+    return resolved
+
+
+async def _search_generic_peer(
+    client: InfrahubClient,
+    identifier: str,
+    branch: str | None = None,
+) -> InfrahubNode | None:
+    """Search across all node schemas for a node matching the identifier.
+
+    Used as a fallback when the relationship peer is a generic type
+    and the direct lookup fails.
+
+    Args:
+        client: Initialised async Infrahub client.
+        identifier: Display name or HFID to search for.
+        branch: Optional target branch.
+
+    Returns:
+        The matched node, or None if not found.
+ """ + all_schemas = await client.schema.all(branch=branch) + hfid_parts = identifier.split("/") if "/" in identifier else [identifier] + + for schema in all_schemas.values(): + if not isinstance(schema, NodeSchemaAPI): + continue + + # Try default_filter first + if schema.default_filter: + try: + filters: dict[str, Any] = {schema.default_filter: identifier} + node = await client.get( # type: ignore[arg-type] + kind=schema.kind, + branch=branch, + raise_when_missing=False, + **filters, + ) + if node is not None: + return node + except Exception: + logger.debug("Failed default_filter for %r via %s", identifier, schema.kind) + + # Try HFID + if schema.human_friendly_id: + try: + node = await client.get( + kind=schema.kind, + hfid=hfid_parts, + branch=branch, + raise_when_missing=False, + ) + if node is not None: + return node + except Exception: + logger.debug("Failed HFID for %r via %s", identifier, schema.kind) + + return None diff --git a/tests/unit/ctl/commands/test_update.py b/tests/unit/ctl/commands/test_update.py index 8825b8a4..fdc32b16 100644 --- a/tests/unit/ctl/commands/test_update.py +++ b/tests/unit/ctl/commands/test_update.py @@ -236,6 +236,9 @@ def test_update_with_set_args_relationship() -> None: mock_client.schema = MagicMock() mock_client.schema.get = AsyncMock(return_value=mock_schema) + async def passthrough_resolve(client: object, data: object, schema: object, **kwargs: object) -> dict: + return {"site": {"id": "new-site-id"}} + with ( patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), patch( @@ -243,12 +246,15 @@ def test_update_with_set_args_relationship() -> None: new_callable=AsyncMock, return_value=mock_node, ), + patch( + "infrahub_sdk.ctl.commands.update.resolve_relationship_values", + side_effect=passthrough_resolve, + ), ): result = runner.invoke(app, ["update", "InfraDevice", "node-rel-001", "--set", "site=new-site-id"]) assert result.exit_code == 0, result.stdout assert "Updated" in result.stdout - 
mock_rel.fetch.assert_awaited_once() mock_node.save.assert_awaited_once() From aca8c891eefe65c4ab6dd18ee4b7bd0ffb54a9f0 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 20:48:20 +0000 Subject: [PATCH 15/32] fix: show name instead of None in create confirmation message --- infrahub_sdk/ctl/commands/create.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/infrahub_sdk/ctl/commands/create.py b/infrahub_sdk/ctl/commands/create.py index 85495f1f..7274e718 100644 --- a/infrahub_sdk/ctl/commands/create.py +++ b/infrahub_sdk/ctl/commands/create.py @@ -58,4 +58,5 @@ async def create_command( data = await resolve_relationship_values(client, data, schema, branch=branch) node = await client.create(kind=kind, data=data, branch=branch) await node.save(allow_upsert=True) - console.print(f"[green]Created {kind} '{node.display_label}' (id: {node.id})") + label = node.display_label or data.get("name") or node.id + console.print(f"[green]Created {kind} '{label}' (id: {node.id})") From 9f2813e8fb715876d5ef806a5aec3feadbe12c70 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 20:51:23 +0000 Subject: [PATCH 16/32] feat: distinguish create vs upsert in confirmation message --- infrahub_sdk/ctl/commands/create.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/infrahub_sdk/ctl/commands/create.py b/infrahub_sdk/ctl/commands/create.py index 7274e718..aebd9449 100644 --- a/infrahub_sdk/ctl/commands/create.py +++ b/infrahub_sdk/ctl/commands/create.py @@ -6,13 +6,14 @@ from __future__ import annotations +import contextlib from pathlib import Path -import typer -from rich.console import Console +import typer # pyright: ignore[reportMissingImports] +from rich.console import Console # pyright: ignore[reportMissingImports] from infrahub_sdk.ctl.client import initialize_client -from infrahub_sdk.ctl.commands.utils import resolve_relationship_values +from infrahub_sdk.ctl.commands.utils import 
resolve_node, resolve_relationship_values from infrahub_sdk.ctl.parameters import CONFIG_PARAM from infrahub_sdk.ctl.parsers import parse_set_args, validate_set_fields from infrahub_sdk.ctl.utils import catch_exception @@ -56,7 +57,19 @@ async def create_command( schema = await client.schema.get(kind=kind, branch=branch) validate_set_fields(data, schema.attribute_names, schema.relationship_names) data = await resolve_relationship_values(client, data, schema, branch=branch) + + # Check if node already exists to distinguish create from upsert + existing = None + name_value = data.get("name") + if name_value is not None: + with contextlib.suppress(Exception): + existing = await resolve_node(client, kind, str(name_value), schema=schema, branch=branch) + node = await client.create(kind=kind, data=data, branch=branch) await node.save(allow_upsert=True) - label = node.display_label or data.get("name") or node.id - console.print(f"[green]Created {kind} '{label}' (id: {node.id})") + label = node.display_label or name_value or node.id + + if existing: + console.print(f"[yellow]Updated {kind} '{label}' (id: {node.id}) — already existed") + else: + console.print(f"[green]Created {kind} '{label}' (id: {node.id})") From 0c730be8be6a4a6d4f5e3470bc9587799c596c47 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 20:56:27 +0000 Subject: [PATCH 17/32] feat: skip save and show no-op message when update values unchanged --- infrahub_sdk/ctl/commands/update.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index 196df925..e16462e2 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -107,10 +107,16 @@ async def _update_with_set_args( setattr(node, key, resolved_data[key]) changes.append((key, old_display, new_value)) + actual_changes = [(f, o, n) for f, o, n in changes if str(o) != str(n)] + + if not actual_changes: + 
console.print(f"[yellow]No changes — {kind} '{identifier}' already has the requested values.") + return + await node.save() console.print(f"[green]Updated {kind} '{identifier}' successfully.") - for field_name, old_val, new_val in changes: + for field_name, old_val, new_val in actual_changes: console.print(f" {field_name}: {old_val} -> {new_val}") From db23a7daf58e0daf023a6bd0e195ef77c20a80b1 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 21:00:17 +0000 Subject: [PATCH 18/32] docs: regenerate get command docs (--all-columns flag added) --- docs/docs/infrahubctl/infrahubctl-get.mdx | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/docs/infrahubctl/infrahubctl-get.mdx b/docs/docs/infrahubctl/infrahubctl-get.mdx index 548e0bea..73559d02 100644 --- a/docs/docs/infrahubctl/infrahubctl-get.mdx +++ b/docs/docs/infrahubctl/infrahubctl-get.mdx @@ -5,6 +5,11 @@ Query and display Infrahub objects. When IDENTIFIER is omitted, lists all objects of the given KIND. When IDENTIFIER is provided, displays a single object in detail view. +By default, columns where every value is empty are hidden in table +and CSV output. Use --all-columns to show them. + +Exit codes: 0 = results found, 80 = query succeeded but no results. + **Usage**: ```console @@ -23,6 +28,7 @@ $ infrahubctl get [OPTIONS] KIND [IDENTIFIER] * `-b, --branch TEXT`: Target branch * `--limit INTEGER`: Maximum results * `--offset INTEGER`: Skip first N results +* `--all-columns`: Show all columns including empty ones * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--install-completion`: Install completion for the current shell. * `--show-completion`: Show completion for the current shell, to copy it or customize the installation. 
From 463371378c5d0439223b1cff8e3c6a736b4a3b41 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:33:11 +0000 Subject: [PATCH 19/32] docs: add usage examples to all command help text --- docs/docs/infrahubctl/infrahubctl-create.mdx | 6 ++++++ docs/docs/infrahubctl/infrahubctl-delete.mdx | 5 +++++ docs/docs/infrahubctl/infrahubctl-get.mdx | 16 +++++++++++----- docs/docs/infrahubctl/infrahubctl-schema.mdx | 9 +++++++++ docs/docs/infrahubctl/infrahubctl-update.mdx | 11 ++++++++--- infrahub_sdk/ctl/commands/create.py | 6 ++++++ infrahub_sdk/ctl/commands/delete.py | 5 +++++ infrahub_sdk/ctl/commands/get.py | 16 +++++++++++----- infrahub_sdk/ctl/commands/update.py | 11 ++++++++--- infrahub_sdk/ctl/schema.py | 9 +++++++++ 10 files changed, 78 insertions(+), 16 deletions(-) diff --git a/docs/docs/infrahubctl/infrahubctl-create.mdx b/docs/docs/infrahubctl/infrahubctl-create.mdx index b7fcb512..bc491139 100644 --- a/docs/docs/infrahubctl/infrahubctl-create.mdx +++ b/docs/docs/infrahubctl/infrahubctl-create.mdx @@ -5,6 +5,12 @@ Create a new object in Infrahub. Provide field values with repeatable --set flags or supply a JSON/YAML object file via --file. The two modes are mutually exclusive. + +Examples: + infrahubctl create InfraDevice --set name=spine01 --set status=active + infrahubctl create InfraDevice --set name=spine01 --set location=DC1 + infrahubctl create InfraDevice --file devices.yml + **Usage**: ```console diff --git a/docs/docs/infrahubctl/infrahubctl-delete.mdx b/docs/docs/infrahubctl/infrahubctl-delete.mdx index 7f619ed4..e31eef46 100644 --- a/docs/docs/infrahubctl/infrahubctl-delete.mdx +++ b/docs/docs/infrahubctl/infrahubctl-delete.mdx @@ -5,6 +5,11 @@ Delete an Infrahub object. Fetches the object by KIND and IDENTIFIER, then deletes it. Unless --yes is provided, a confirmation prompt is shown first. 
+ +Examples: + infrahubctl delete InfraDevice spine01 + infrahubctl delete InfraDevice spine01 --yes + **Usage**: ```console diff --git a/docs/docs/infrahubctl/infrahubctl-get.mdx b/docs/docs/infrahubctl/infrahubctl-get.mdx index 73559d02..5130c2fe 100644 --- a/docs/docs/infrahubctl/infrahubctl-get.mdx +++ b/docs/docs/infrahubctl/infrahubctl-get.mdx @@ -2,11 +2,17 @@ Query and display Infrahub objects. -When IDENTIFIER is omitted, lists all objects of the given KIND. -When IDENTIFIER is provided, displays a single object in detail view. - -By default, columns where every value is empty are hidden in table -and CSV output. Use --all-columns to show them. +When IDENTIFIER is omitted the command lists all objects of the given +KIND. When IDENTIFIER is provided it displays a single object in +detail view. Empty columns are hidden by default (use --all-columns). + + +Examples: + infrahubctl get InfraDevice + infrahubctl get InfraDevice spine01 + infrahubctl get InfraDevice --filter name__value=spine01 + infrahubctl get InfraDevice --output json + infrahubctl get InfraDevice --output yaml > backup.yml Exit codes: 0 = results found, 80 = query succeeded but no results. diff --git a/docs/docs/infrahubctl/infrahubctl-schema.mdx b/docs/docs/infrahubctl/infrahubctl-schema.mdx index 25500f0e..34865f6d 100644 --- a/docs/docs/infrahubctl/infrahubctl-schema.mdx +++ b/docs/docs/infrahubctl/infrahubctl-schema.mdx @@ -69,6 +69,11 @@ List all available schema kinds. Displays a table of all node schema entries. Use --filter to narrow results by a case-insensitive match on the kind name. + +Examples: + infrahubctl schema list + infrahubctl schema list --filter Device + **Usage**: ```console @@ -111,6 +116,10 @@ Show details for a specific schema kind. Displays metadata, attributes, and relationships for the requested schema kind in a human-readable format. 
+ +Examples: + infrahubctl schema show InfraDevice + **Usage**: ```console diff --git a/docs/docs/infrahubctl/infrahubctl-update.mdx b/docs/docs/infrahubctl/infrahubctl-update.mdx index 6147d7cb..7e20b60d 100644 --- a/docs/docs/infrahubctl/infrahubctl-update.mdx +++ b/docs/docs/infrahubctl/infrahubctl-update.mdx @@ -2,9 +2,14 @@ Update an existing object in Infrahub. -Fetches the object identified by KIND and IDENTIFIER, applies the -requested changes, and saves the updated object back to the server. -Provide field values with repeatable --set flags or supply a file via --file. +Fetches the object by KIND and IDENTIFIER, applies the requested +changes, and saves back to the server. Use --set or --file. + + +Examples: + infrahubctl update InfraDevice spine01 --set status=active + infrahubctl update InfraDevice spine01 --set location=DC1 + infrahubctl update InfraDevice spine01 --file updates.yml **Usage**: diff --git a/infrahub_sdk/ctl/commands/create.py b/infrahub_sdk/ctl/commands/create.py index aebd9449..a54e4f25 100644 --- a/infrahub_sdk/ctl/commands/create.py +++ b/infrahub_sdk/ctl/commands/create.py @@ -34,6 +34,12 @@ async def create_command( Provide field values with repeatable --set flags or supply a JSON/YAML object file via --file. The two modes are mutually exclusive. + + \b + Examples: + infrahubctl create InfraDevice --set name=spine01 --set status=active + infrahubctl create InfraDevice --set name=spine01 --set location=DC1 + infrahubctl create InfraDevice --file devices.yml """ if set_args and file: console.print("[red]Error: --set and --file are mutually exclusive.") diff --git a/infrahub_sdk/ctl/commands/delete.py b/infrahub_sdk/ctl/commands/delete.py index 37345365..3b629a78 100644 --- a/infrahub_sdk/ctl/commands/delete.py +++ b/infrahub_sdk/ctl/commands/delete.py @@ -28,6 +28,11 @@ async def delete_command( Fetches the object by KIND and IDENTIFIER, then deletes it. Unless --yes is provided, a confirmation prompt is shown first. 
+ + \b + Examples: + infrahubctl delete InfraDevice spine01 + infrahubctl delete InfraDevice spine01 --yes """ client = initialize_client(branch=branch) node = await resolve_node(client, kind, identifier, branch=branch) diff --git a/infrahub_sdk/ctl/commands/get.py b/infrahub_sdk/ctl/commands/get.py index fc1c7854..df46916f 100644 --- a/infrahub_sdk/ctl/commands/get.py +++ b/infrahub_sdk/ctl/commands/get.py @@ -40,11 +40,17 @@ async def get_command( ) -> None: """Query and display Infrahub objects. - When IDENTIFIER is omitted, lists all objects of the given KIND. - When IDENTIFIER is provided, displays a single object in detail view. - - By default, columns where every value is empty are hidden in table - and CSV output. Use --all-columns to show them. + When IDENTIFIER is omitted the command lists all objects of the given + KIND. When IDENTIFIER is provided it displays a single object in + detail view. Empty columns are hidden by default (use --all-columns). + + \b + Examples: + infrahubctl get InfraDevice + infrahubctl get InfraDevice spine01 + infrahubctl get InfraDevice --filter name__value=spine01 + infrahubctl get InfraDevice --output json + infrahubctl get InfraDevice --output yaml > backup.yml Exit codes: 0 = results found, 80 = query succeeded but no results. """ diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index e16462e2..023745e4 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -36,9 +36,14 @@ async def update_command( ) -> None: """Update an existing object in Infrahub. - Fetches the object identified by KIND and IDENTIFIER, applies the - requested changes, and saves the updated object back to the server. - Provide field values with repeatable --set flags or supply a file via --file. + Fetches the object by KIND and IDENTIFIER, applies the requested + changes, and saves back to the server. Use --set or --file. 
+ + \b + Examples: + infrahubctl update InfraDevice spine01 --set status=active + infrahubctl update InfraDevice spine01 --set location=DC1 + infrahubctl update InfraDevice spine01 --file updates.yml """ if set_args and file: console.print("[red]Error: --set and --file are mutually exclusive.") diff --git a/infrahub_sdk/ctl/schema.py b/infrahub_sdk/ctl/schema.py index 936c9897..c5b3498a 100644 --- a/infrahub_sdk/ctl/schema.py +++ b/infrahub_sdk/ctl/schema.py @@ -272,6 +272,11 @@ async def schema_list( Displays a table of all node schema entries. Use --filter to narrow results by a case-insensitive match on the kind name. + + \b + Examples: + infrahubctl schema list + infrahubctl schema list --filter Device """ client = initialize_client(branch=branch) schemas = await client.schema.all(branch=branch) @@ -311,6 +316,10 @@ async def schema_show( Displays metadata, attributes, and relationships for the requested schema kind in a human-readable format. + + \b + Examples: + infrahubctl schema show InfraDevice """ client = initialize_client(branch=branch) node_schema = await client.schema.get(kind=kind, branch=branch) From 0a3a408033707e37dfc920aa98089679a05cf4ec Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:40:26 +0000 Subject: [PATCH 20/32] fix: add 'yaml' to vale spelling exceptions --- .vale/styles/spelling-exceptions.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/.vale/styles/spelling-exceptions.txt b/.vale/styles/spelling-exceptions.txt index ecba179f..2adc2f1c 100644 --- a/.vale/styles/spelling-exceptions.txt +++ b/.vale/styles/spelling-exceptions.txt @@ -133,6 +133,7 @@ validators Version Control Vitest VLANs +yaml Yaml yamllint YouTube From 4caff143a49e06a29fd56b07604f833defaed4ed Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:43:45 +0000 Subject: [PATCH 21/32] docs: clarify exit code 80 applies only to list mode, not detail lookups --- docs/docs/infrahubctl/infrahubctl-get.mdx | 3 ++- 
infrahub_sdk/ctl/commands/get.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/docs/infrahubctl/infrahubctl-get.mdx b/docs/docs/infrahubctl/infrahubctl-get.mdx index 5130c2fe..76db97f6 100644 --- a/docs/docs/infrahubctl/infrahubctl-get.mdx +++ b/docs/docs/infrahubctl/infrahubctl-get.mdx @@ -14,7 +14,8 @@ Examples: infrahubctl get InfraDevice --output json infrahubctl get InfraDevice --output yaml > backup.yml -Exit codes: 0 = results found, 80 = query succeeded but no results. +Exit codes: 0 = results found, 1 = error (including not found in detail +mode), 80 = list query succeeded but returned zero objects. **Usage**: diff --git a/infrahub_sdk/ctl/commands/get.py b/infrahub_sdk/ctl/commands/get.py index df46916f..55c2f692 100644 --- a/infrahub_sdk/ctl/commands/get.py +++ b/infrahub_sdk/ctl/commands/get.py @@ -52,7 +52,8 @@ async def get_command( infrahubctl get InfraDevice --output json infrahubctl get InfraDevice --output yaml > backup.yml - Exit codes: 0 = results found, 80 = query succeeded but no results. + Exit codes: 0 = results found, 1 = error (including not found in detail + mode), 80 = list query succeeded but returned zero objects. 
""" client = initialize_client(branch=branch) schema = await client.schema.get(kind=kind, branch=branch) From 1b4b2bd4c81416c759e1aa3a5964be77704f4130 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:45:16 +0000 Subject: [PATCH 22/32] docs: document HFID support in identifier argument help text --- docs/docs/infrahubctl/infrahubctl-delete.mdx | 2 +- docs/docs/infrahubctl/infrahubctl-get.mdx | 2 +- docs/docs/infrahubctl/infrahubctl-update.mdx | 2 +- infrahub_sdk/ctl/commands/delete.py | 2 +- infrahub_sdk/ctl/commands/get.py | 2 +- infrahub_sdk/ctl/commands/update.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/docs/infrahubctl/infrahubctl-delete.mdx b/docs/docs/infrahubctl/infrahubctl-delete.mdx index e31eef46..140f147b 100644 --- a/docs/docs/infrahubctl/infrahubctl-delete.mdx +++ b/docs/docs/infrahubctl/infrahubctl-delete.mdx @@ -19,7 +19,7 @@ $ infrahubctl delete [OPTIONS] KIND IDENTIFIER **Arguments**: * `KIND`: Infrahub schema kind [required] -* `IDENTIFIER`: Object ID or display name [required] +* `IDENTIFIER`: UUID, name, or HFID (use / for multi-part, e.g. Cisco/NX-OS) [required] **Options**: diff --git a/docs/docs/infrahubctl/infrahubctl-get.mdx b/docs/docs/infrahubctl/infrahubctl-get.mdx index 76db97f6..ca4abe8e 100644 --- a/docs/docs/infrahubctl/infrahubctl-get.mdx +++ b/docs/docs/infrahubctl/infrahubctl-get.mdx @@ -26,7 +26,7 @@ $ infrahubctl get [OPTIONS] KIND [IDENTIFIER] **Arguments**: * `KIND`: Infrahub schema kind to query [required] -* `[IDENTIFIER]`: Object ID or display name for detail view +* `[IDENTIFIER]`: UUID, name, or HFID (use / for multi-part, e.g. 
Cisco/NX-OS) **Options**: diff --git a/docs/docs/infrahubctl/infrahubctl-update.mdx b/docs/docs/infrahubctl/infrahubctl-update.mdx index 7e20b60d..d4ec4846 100644 --- a/docs/docs/infrahubctl/infrahubctl-update.mdx +++ b/docs/docs/infrahubctl/infrahubctl-update.mdx @@ -20,7 +20,7 @@ $ infrahubctl update [OPTIONS] KIND IDENTIFIER **Arguments**: * `KIND`: Infrahub schema kind [required] -* `IDENTIFIER`: Object ID or display name [required] +* `IDENTIFIER`: UUID, name, or HFID (use / for multi-part, e.g. Cisco/NX-OS) [required] **Options**: diff --git a/infrahub_sdk/ctl/commands/delete.py b/infrahub_sdk/ctl/commands/delete.py index 3b629a78..90830b14 100644 --- a/infrahub_sdk/ctl/commands/delete.py +++ b/infrahub_sdk/ctl/commands/delete.py @@ -19,7 +19,7 @@ @catch_exception(console=console) async def delete_command( kind: str = typer.Argument(..., help="Infrahub schema kind"), - identifier: str = typer.Argument(..., help="Object ID or display name"), + identifier: str = typer.Argument(..., help="UUID, name, or HFID (use / for multi-part, e.g. Cisco/NX-OS)"), yes: bool = typer.Option(False, "--yes", "-y", help="Skip confirmation prompt"), branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), _: str = CONFIG_PARAM, diff --git a/infrahub_sdk/ctl/commands/get.py b/infrahub_sdk/ctl/commands/get.py index 55c2f692..ae16f4d0 100644 --- a/infrahub_sdk/ctl/commands/get.py +++ b/infrahub_sdk/ctl/commands/get.py @@ -29,7 +29,7 @@ @catch_exception(console=console) async def get_command( kind: str = typer.Argument(..., help="Infrahub schema kind to query"), - identifier: str | None = typer.Argument(None, help="Object ID or display name for detail view"), + identifier: str | None = typer.Argument(None, help="UUID, name, or HFID (use / for multi-part, e.g. 
Cisco/NX-OS)"), filter_args: list[str] | None = typer.Option(None, "--filter", help="Filter in attr__value=x format"), output: OutputFormat | None = typer.Option(None, "--output", "-o", help="Output format"), branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index 023745e4..462576a6 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -28,7 +28,7 @@ @catch_exception(console=console) async def update_command( kind: str = typer.Argument(..., help="Infrahub schema kind"), - identifier: str = typer.Argument(..., help="Object ID or display name"), + identifier: str = typer.Argument(..., help="UUID, name, or HFID (use / for multi-part, e.g. Cisco/NX-OS)"), set_args: list[str] | None = typer.Option(None, "--set", help="Field value in key=value format"), file: Path | None = typer.Option(None, "--file", "-f", help="JSON or YAML file with update data"), branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), From ff5c177c4c3be75fe71077d0233b6050d3feca6a Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:46:09 +0000 Subject: [PATCH 23/32] fix: compare relationship IDs not display strings for change detection --- infrahub_sdk/ctl/commands/update.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index 462576a6..38bd011f 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -108,9 +108,13 @@ async def _update_with_set_args( changes.append((key, old_value, new_value)) elif key in schema.relationship_names: rel = getattr(node, key) - old_display = getattr(rel, "display_label", getattr(rel, "id", None)) - setattr(node, key, resolved_data[key]) - changes.append((key, old_display, new_value)) + old_id = getattr(rel, "id", None) + old_display = 
getattr(rel, "display_label", old_id) + resolved = resolved_data[key] + new_id = resolved.get("id") if isinstance(resolved, dict) else resolved + if old_id != new_id: + setattr(node, key, resolved) + changes.append((key, old_display, new_value)) actual_changes = [(f, o, n) for f, o, n in changes if str(o) != str(n)] From 419b13b2980d3e6ced5f037e3ba53b5eb329807e Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:47:35 +0000 Subject: [PATCH 24/32] fix: warn that kind/identifier are ignored in update --file mode --- infrahub_sdk/ctl/commands/update.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index 38bd011f..5d026af7 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -65,6 +65,8 @@ async def update_command( branch=branch, ) elif file: + console.print("[dim]Note: KIND and IDENTIFIER are ignored in --file mode; " + "the file defines target objects.[/dim]") await _update_with_file( client=client, file=file, From f1b73c05fa6df9cf78207363d571a1766b50396d Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:49:29 +0000 Subject: [PATCH 25/32] fix: narrow exception handling in resolve_relationship_values Only catch lookup-miss errors (NodeNotFoundError, SchemaNotFoundError, ValueError, IndexError) before falling back to generic peer search. Auth, network, and other unexpected errors now propagate instead of being silently swallowed. 
--- infrahub_sdk/ctl/commands/utils.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/infrahub_sdk/ctl/commands/utils.py b/infrahub_sdk/ctl/commands/utils.py index c773a381..cd62074d 100644 --- a/infrahub_sdk/ctl/commands/utils.py +++ b/infrahub_sdk/ctl/commands/utils.py @@ -5,6 +5,7 @@ import logging from typing import TYPE_CHECKING, Any +from infrahub_sdk.exceptions import NodeNotFoundError, SchemaNotFoundError from infrahub_sdk.schema import NodeSchemaAPI from infrahub_sdk.utils import is_valid_uuid @@ -121,13 +122,13 @@ async def resolve_relationship_values( rel_schema = schema.get_relationship(key) peer_kind = rel_schema.peer - # Try to resolve the string value as a node identifier + # Try to resolve the string value as a node identifier. + # Only fall back to generic peer search on lookup-miss errors; + # re-raise auth, network, and other unexpected errors. try: peer_node = await resolve_node(client, peer_kind, str_value, branch=branch) resolved[key] = {"id": peer_node.id} - except Exception: - # If the peer kind is a generic, try searching all schemas - # for a node matching the value + except (NodeNotFoundError, SchemaNotFoundError, ValueError, IndexError): node = await _search_generic_peer(client, str_value, branch=branch) if node is not None: resolved[key] = {"id": node.id} From 93634c639406f98f9dacf9661b483e77894b6a0f Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:52:49 +0000 Subject: [PATCH 26/32] test: remove redundant @pytest.mark.anyio from resolve_node tests asyncio_mode=auto in pyproject.toml already detects async tests. The anyio markers caused each test to run under both asyncio and trio, doubling the count unnecessarily. 
--- tests/unit/ctl/commands/test_utils.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tests/unit/ctl/commands/test_utils.py b/tests/unit/ctl/commands/test_utils.py index 57f370db..43be4d5b 100644 --- a/tests/unit/ctl/commands/test_utils.py +++ b/tests/unit/ctl/commands/test_utils.py @@ -21,7 +21,6 @@ def mock_client() -> MagicMock: return client -@pytest.mark.anyio async def test_resolve_by_uuid(mock_client: MagicMock) -> None: """When the identifier is a valid UUID, ``client.get(id=...)`` is called directly.""" mock_schema = MagicMock() @@ -41,7 +40,6 @@ async def test_resolve_by_uuid(mock_client: MagicMock) -> None: mock_client.get.assert_awaited_once_with(kind="InfraDevice", id=uuid_identifier, branch=None) -@pytest.mark.anyio async def test_resolve_by_default_filter(mock_client: MagicMock) -> None: """When the schema has a ``default_filter``, it is used as a keyword filter.""" mock_schema = MagicMock(spec=NodeSchemaAPI) @@ -64,7 +62,6 @@ async def test_resolve_by_default_filter(mock_client: MagicMock) -> None: ) -@pytest.mark.anyio async def test_resolve_by_hfid(mock_client: MagicMock) -> None: """When the schema defines ``human_friendly_id``, ``client.get(hfid=...)`` is used.""" @@ -88,7 +85,6 @@ async def test_resolve_by_hfid(mock_client: MagicMock) -> None: ) -@pytest.mark.anyio async def test_resolve_by_hfid_multi_component(mock_client: MagicMock) -> None: """Multi-component HFID strings (``a/b``) are split on ``/``.""" @@ -112,7 +108,6 @@ async def test_resolve_by_hfid_multi_component(mock_client: MagicMock) -> None: ) -@pytest.mark.anyio async def test_resolve_fallback_raises(mock_client: MagicMock) -> None: """When no lookup strategy matches, the fallback ``client.get(id=...)`` call raises.""" @@ -134,7 +129,6 @@ async def test_resolve_fallback_raises(mock_client: MagicMock) -> None: mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="unknown-name", branch=None) -@pytest.mark.anyio async def 
test_resolve_uses_provided_schema(mock_client: MagicMock) -> None: """When ``schema`` is provided, ``client.schema.get`` is not called.""" pre_fetched_schema = MagicMock(spec=NodeSchemaAPI) @@ -153,7 +147,6 @@ async def test_resolve_uses_provided_schema(mock_client: MagicMock) -> None: mock_client.schema.get.assert_not_awaited() -@pytest.mark.anyio async def test_resolve_default_filter_miss_falls_through_to_hfid(mock_client: MagicMock) -> None: """When the default-filter lookup returns ``None``, the HFID strategy is tried next.""" mock_schema = MagicMock(spec=NodeSchemaAPI) From fa3688f4fb38a99d6b78fefa683c75b1a001afe2 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:56:18 +0000 Subject: [PATCH 27/32] docs: fix contract to show exit code 80 for empty list results --- specs/001-end-user-cli/contracts/cli-commands.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/001-end-user-cli/contracts/cli-commands.md b/specs/001-end-user-cli/contracts/cli-commands.md index 49144cc3..044ab67a 100644 --- a/specs/001-end-user-cli/contracts/cli-commands.md +++ b/specs/001-end-user-cli/contracts/cli-commands.md @@ -14,7 +14,7 @@ All commands accept: - Input: kind (positional), --filter (repeatable), --limit INT, --offset INT - Output: Table with columns for each attribute + relationship (display names) -- Exit 0: results found | Exit 0: no results (empty table) | Exit 1: invalid kind +- Exit 0: results found | Exit 80: no results (empty list) | Exit 1: invalid kind **Detail mode** (with identifier): From 52f77ceabcbdc2e6a0c7cc8cdd65f4f811c00023 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:58:07 +0000 Subject: [PATCH 28/32] docs: update spec to reference infrahubctl instead of infrahub command --- specs/001-end-user-cli/spec.md | 56 +++++++++++++++++----------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/specs/001-end-user-cli/spec.md b/specs/001-end-user-cli/spec.md index 
3ffd252c..41363415 100644 --- a/specs/001-end-user-cli/spec.md +++ b/specs/001-end-user-cli/spec.md @@ -1,9 +1,9 @@ -# Feature Specification: End-User CLI (`infrahub` command) +# Feature Specification: End-User CLI (`infrahubctl` CRUD commands) **Feature Branch**: `001-end-user-cli` **Created**: 2026-03-28 **Status**: Draft -**Input**: User description: "Create an `infrahub` CLI command for end users to query, create, and modify data in the Infrahub database, distinct from the developer-oriented `infrahubctl`." +**Input**: User description: "Add CRUD and schema discovery commands to `infrahubctl` for end users to query, create, and modify data in the Infrahub database." ## User Scenarios & Testing *(mandatory)* @@ -17,13 +17,13 @@ An end user wants to retrieve data from Infrahub to answer operational questions **Acceptance Scenarios**: -1. **Given** a running Infrahub instance with data, **When** the user runs `infrahub get `, **Then** a formatted table of all objects of that kind is displayed with attribute columns and relationship columns (showing display names). -2. **Given** a running Infrahub instance, **When** the user runs `infrahub get --filter name__value="spine01"`, **Then** only objects matching the filter are returned. -3. **Given** a running Infrahub instance, **When** the user runs `infrahub get --output json`, **Then** the results are printed as valid JSON to stdout. -4. **Given** a running Infrahub instance, **When** the user runs `infrahub get --output yaml`, **Then** the results are printed in Infrahub Object YAML format (with `apiVersion: infrahub.app/v1`, `kind: Object`, `spec.kind`, and `spec.data` array), suitable for round-tripping back into `infrahub create --file`. -5. **Given** an Infrahub instance, **When** the user runs `infrahub get --branch develop`, **Then** data from the specified branch is returned. -6. 
**Given** an invalid kind name, **When** the user runs `infrahub get UnknownKind`, **Then** a clear error message is displayed listing available kinds or suggesting corrections. -7. **Given** an existing object, **When** the user runs `infrahub get `, **Then** a detailed view is displayed showing all attributes, relationships, and metadata for that single object. +1. **Given** a running Infrahub instance with data, **When** the user runs `infrahubctl get `, **Then** a formatted table of all objects of that kind is displayed with attribute columns and relationship columns (showing display names). +2. **Given** a running Infrahub instance, **When** the user runs `infrahubctl get --filter name__value="spine01"`, **Then** only objects matching the filter are returned. +3. **Given** a running Infrahub instance, **When** the user runs `infrahubctl get --output json`, **Then** the results are printed as valid JSON to stdout. +4. **Given** a running Infrahub instance, **When** the user runs `infrahubctl get --output yaml`, **Then** the results are printed in Infrahub Object YAML format (with `apiVersion: infrahub.app/v1`, `kind: Object`, `spec.kind`, and `spec.data` array), suitable for round-tripping back into `infrahubctl create --file`. +5. **Given** an Infrahub instance, **When** the user runs `infrahubctl get --branch develop`, **Then** data from the specified branch is returned. +6. **Given** an invalid kind name, **When** the user runs `infrahubctl get UnknownKind`, **Then** a clear error message is displayed listing available kinds or suggesting corrections. +7. **Given** an existing object, **When** the user runs `infrahubctl get `, **Then** a detailed view is displayed showing all attributes, relationships, and metadata for that single object. --- @@ -37,9 +37,9 @@ An end user needs to add new infrastructure data to Infrahub. They run a command **Acceptance Scenarios**: -1. 
**Given** a running Infrahub instance, **When** the user runs `infrahub create --set name="spine03" --set description="New spine switch"`, **Then** the object is created and a confirmation with the object ID is displayed. -2. **Given** a YAML file with object definitions, **When** the user runs `infrahub create --file objects.yaml`, **Then** all objects in the file are created and a summary of results (created count, errors) is displayed. -3. **Given** invalid attribute or relationship names, **When** the user runs `infrahub create --set invalid_field="value"`, **Then** a clear validation error is displayed indicating which fields are invalid and what the valid attributes and relationships are. +1. **Given** a running Infrahub instance, **When** the user runs `infrahubctl create --set name="spine03" --set description="New spine switch"`, **Then** the object is created and a confirmation with the object ID is displayed. +2. **Given** a YAML file with object definitions, **When** the user runs `infrahubctl create --file objects.yaml`, **Then** all objects in the file are created and a summary of results (created count, errors) is displayed. +3. **Given** invalid attribute or relationship names, **When** the user runs `infrahubctl create --set invalid_field="value"`, **Then** a clear validation error is displayed indicating which fields are invalid and what the valid attributes and relationships are. --- @@ -53,9 +53,9 @@ An end user needs to update attributes on existing infrastructure objects. They **Acceptance Scenarios**: -1. **Given** an existing object, **When** the user runs `infrahub update --set description="Updated description"`, **Then** the object is updated and a confirmation is displayed showing old and new values. -2. **Given** an existing object, **When** the user runs `infrahub update --file updates.yaml`, **Then** the object is updated from the file contents. -3. 
**Given** a non-existent object identifier, **When** the user runs `infrahub update nonexistent`, **Then** a clear error message indicates the object was not found. +1. **Given** an existing object, **When** the user runs `infrahubctl update --set description="Updated description"`, **Then** the object is updated and a confirmation is displayed showing old and new values. +2. **Given** an existing object, **When** the user runs `infrahubctl update --file updates.yaml`, **Then** the object is updated from the file contents. +3. **Given** a non-existent object identifier, **When** the user runs `infrahubctl update nonexistent`, **Then** a clear error message indicates the object was not found. --- @@ -69,8 +69,8 @@ An end user needs to remove obsolete infrastructure data from Infrahub. They spe **Acceptance Scenarios**: -1. **Given** an existing object, **When** the user runs `infrahub delete `, **Then** a confirmation prompt is shown, and upon confirmation the object is deleted with a success message. -2. **Given** an existing object, **When** the user runs `infrahub delete --yes`, **Then** the object is deleted without a confirmation prompt. +1. **Given** an existing object, **When** the user runs `infrahubctl delete `, **Then** a confirmation prompt is shown, and upon confirmation the object is deleted with a success message. +2. **Given** an existing object, **When** the user runs `infrahubctl delete --yes`, **Then** the object is deleted without a confirmation prompt. 3. **Given** an object with dependencies, **When** the user attempts to delete it, **Then** a clear error message explains what depends on it and how to resolve the conflict. --- @@ -85,9 +85,9 @@ An end user unfamiliar with the data model wants to explore what kinds of object **Acceptance Scenarios**: -1. **Given** a running Infrahub instance, **When** the user runs `infrahub schema list`, **Then** a table of all available kinds is displayed with their namespace, name, and description. -2. 
**Given** a valid kind name, **When** the user runs `infrahub schema show `, **Then** the kind's attributes, relationships, and constraints are displayed in a readable format. -3. **Given** a partial kind name, **When** the user runs `infrahub schema list --filter "Network"`, **Then** only kinds matching the filter are shown. +1. **Given** a running Infrahub instance, **When** the user runs `infrahubctl schema list`, **Then** a table of all available kinds is displayed with their namespace, name, and description. +2. **Given** a valid kind name, **When** the user runs `infrahubctl schema show `, **Then** the kind's attributes, relationships, and constraints are displayed in a readable format. +3. **Given** a partial kind name, **When** the user runs `infrahubctl schema list --filter "Network"`, **Then** only kinds matching the filter are shown. --- @@ -104,21 +104,21 @@ An end user unfamiliar with the data model wants to explore what kinds of object ### Session 2026-03-28 - Q: How should users specify relationships in create/update commands? → A: Unified `--set` flag for both attributes and relationships (e.g., `--set name="x" --set site="my-site"`). -- Q: Should there be a single-object detail view? → A: `infrahub get ` shows a detail view with all attributes, relationships, and metadata. +- Q: Should there be a single-object detail view? → A: `infrahubctl get ` shows a detail view with all attributes, relationships, and metadata. - Q: How should relationships appear in list/table output? → A: Show as columns with their display name (e.g., site column shows "my-site"). Full relationship detail in detail view only. ## Requirements *(mandatory)* ### Functional Requirements -- **FR-001**: The system MUST provide an `infrahub` CLI entry point, separate from `infrahubctl`. -- **FR-002**: The system MUST support querying objects by kind with `infrahub get ` (list view) and `infrahub get ` (detail view showing all attributes, relationships, and metadata). 
+- **FR-001**: The system MUST provide CRUD and schema discovery commands within `infrahubctl`. +- **FR-002**: The system MUST support querying objects by kind with `infrahubctl get ` (list view) and `infrahubctl get ` (detail view showing all attributes, relationships, and metadata). - **FR-003**: The system MUST support filtering query results by attribute values. -- **FR-004**: The system MUST support multiple output formats: human-readable table (default), JSON, CSV, and Infrahub Object YAML (`--output yaml`). The YAML format MUST use the Infrahub spec object structure (`apiVersion: infrahub.app/v1`, `kind: Object`, with `spec.kind` and `spec.data` fields), matching the format used by `infrahub create --file`. -- **FR-005**: The system MUST support creating objects with `infrahub create ` using inline `--set` flags (for both attributes and relationships) or file input. -- **FR-006**: The system MUST support updating objects with `infrahub update ` using inline `--set` flags (for both attributes and relationships) or file input. -- **FR-007**: The system MUST support deleting objects with `infrahub delete ` with confirmation. -- **FR-008**: The system MUST support schema discovery with `infrahub schema list` and `infrahub schema show `. +- **FR-004**: The system MUST support multiple output formats: human-readable table (default), JSON, CSV, and Infrahub Object YAML (`--output yaml`). The YAML format MUST use the Infrahub spec object structure (`apiVersion: infrahub.app/v1`, `kind: Object`, with `spec.kind` and `spec.data` fields), matching the format used by `infrahubctl create --file`. +- **FR-005**: The system MUST support creating objects with `infrahubctl create ` using inline `--set` flags (for both attributes and relationships) or file input. +- **FR-006**: The system MUST support updating objects with `infrahubctl update ` using inline `--set` flags (for both attributes and relationships) or file input. 
+- **FR-007**: The system MUST support deleting objects with `infrahubctl delete ` with confirmation. +- **FR-008**: The system MUST support schema discovery with `infrahubctl schema list` and `infrahubctl schema show `. - **FR-009**: The system MUST support specifying a target branch for all operations via `--branch`. - **FR-010**: The system MUST reuse the existing SDK configuration mechanism (server address, API token) from `infrahubctl.toml` or environment variables. - **FR-011**: The system MUST display clear, actionable error messages for all failure modes (connection, authentication, validation, not found). From 432ba94c655f8f90cec7ec1fc7066c55877de715 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 22:59:44 +0000 Subject: [PATCH 29/32] docs: update research.md to reflect infrahubctl integration decision --- specs/001-end-user-cli/research.md | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/specs/001-end-user-cli/research.md b/specs/001-end-user-cli/research.md index c2373cae..db3195c1 100644 --- a/specs/001-end-user-cli/research.md +++ b/specs/001-end-user-cli/research.md @@ -1,21 +1,20 @@ -# Research: End-User CLI (`infrahub` command) +# Research: End-User CLI (infrahubctl CRUD commands) ## R1: Entry Point & Packaging Strategy -**Decision**: Add `infrahub` as a second entry point in `[project.scripts]` within the -same package, pointing to a new app in `infrahub_sdk/ctl/enduser_cli.py`. +**Decision**: Register new commands (`get`, `create`, `update`, `delete`) as top-level +commands on the existing `infrahubctl` app in `infrahub_sdk/ctl/cli_commands.py`, and +add `schema list` / `schema show` to the existing `infrahubctl schema` subgroup. -**Rationale**: The existing `infrahubctl` entry point lives in `infrahub_sdk/ctl/cli.py` -and uses the same `[ctl]` optional dependency group (typer, rich, click, pyyaml). The -end-user CLI needs identical dependencies. 
A separate package would duplicate -configuration, authentication, and client initialization code. A second entry point in -the same package reuses all existing infrastructure. +**Rationale**: The existing `infrahubctl` entry point already has the CLI framework, +configuration, authentication, and client initialization. Adding commands to it avoids +a separate entry point and keeps the user experience unified under one tool. **Alternatives considered**: +- Separate `infrahub` entry point: initially implemented, then reversed — added + unnecessary complexity and user confusion with two CLI tools - Separate Python package: rejected — duplicates config/client code, complicates releases -- Subcommand of `infrahubctl`: rejected — user explicitly wants separate `infrahub` - command with end-user focus distinct from developer tooling ## R2: CLI Framework & Async Pattern From 2a8fb43a5a5bd7762ca58e6a17f1dcb6f81eafa2 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 23:04:40 +0000 Subject: [PATCH 30/32] fix: preserve leading zeros in --set value coercion Values like '00123' are now kept as strings instead of being coerced to int 123. The coercion only applies when str(int(v)) == v, ensuring no data loss for zip codes, serial numbers, rack unit names, etc. --- infrahub_sdk/ctl/parsers.py | 12 ++++++++---- tests/unit/ctl/test_parsers.py | 5 +++++ 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/infrahub_sdk/ctl/parsers.py b/infrahub_sdk/ctl/parsers.py index 6d7823d7..006c09e8 100644 --- a/infrahub_sdk/ctl/parsers.py +++ b/infrahub_sdk/ctl/parsers.py @@ -17,15 +17,19 @@ def _coerce_value(value: str) -> Any: Returns: The coerced Python value. 
""" - # Try integer + # Try integer (preserve leading zeros — "00123" stays a string) try: - return int(value) + int_val = int(value) + if str(int_val) == value: + return int_val except ValueError: pass - # Try float + # Try float (preserve leading zeros) try: - return float(value) + float_val = float(value) + if str(float_val) == value: + return float_val except ValueError: pass diff --git a/tests/unit/ctl/test_parsers.py b/tests/unit/ctl/test_parsers.py index 2cb43f12..02850802 100644 --- a/tests/unit/ctl/test_parsers.py +++ b/tests/unit/ctl/test_parsers.py @@ -46,6 +46,11 @@ def test_string_passthrough(self) -> None: def test_string_with_spaces(self) -> None: assert parse_set_args(["name=my device"])["name"] == "my device" + def test_leading_zero_stays_string(self) -> None: + result = parse_set_args(["code=00123"]) + assert result["code"] == "00123" + assert isinstance(result["code"], str) + def test_empty_string(self) -> None: result = parse_set_args(["name="]) assert not result["name"] From 03714b7780f2ea6f7b33bcc7fe72aa8e9afe1984 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 23:05:57 +0000 Subject: [PATCH 31/32] test: add relationship no-op test for update command --- tests/unit/ctl/commands/test_update.py | 42 ++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/tests/unit/ctl/commands/test_update.py b/tests/unit/ctl/commands/test_update.py index fdc32b16..b0f70892 100644 --- a/tests/unit/ctl/commands/test_update.py +++ b/tests/unit/ctl/commands/test_update.py @@ -258,6 +258,48 @@ async def passthrough_resolve(client: object, data: object, schema: object, **kw mock_node.save.assert_awaited_once() +def test_update_with_set_args_relationship_noop() -> None: + """``update`` with a relationship --set that resolves to the same target is a no-op.""" + mock_schema = MagicMock() + mock_schema.attribute_names = [] + mock_schema.relationship_names = ["site"] + + mock_rel = MagicMock() + mock_rel.id = "same-site-id" + 
mock_rel.display_label = "same-site" + + mock_node = MagicMock() + mock_node.id = "node-noop-001" + mock_node.display_label = "device-noop" + mock_node.site = mock_rel + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + async def resolve_to_same(client: object, data: object, schema: object, **kwargs: object) -> dict: + return {"site": {"id": "same-site-id"}} + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ), + patch( + "infrahub_sdk.ctl.commands.update.resolve_relationship_values", + side_effect=resolve_to_same, + ), + ): + result = runner.invoke(app, ["update", "InfraDevice", "node-noop-001", "--set", "site=same-site"]) + + assert result.exit_code == 0, result.stdout + assert "No changes" in result.stdout + mock_node.save.assert_not_awaited() + + @pytest.mark.parametrize("bad_arg", ["noequals", "=emptykey"]) def test_update_malformed_set_arg(bad_arg: str) -> None: """Malformed --set arguments (no ``=`` or empty key) exit with a non-zero code.""" From bc4bfb655e13693a17afc6d4761d9915f2a997b7 Mon Sep 17 00:00:00 2001 From: Pete Crocker Date: Sat, 28 Mar 2026 23:11:09 +0000 Subject: [PATCH 32/32] fix: ruff formatting for update.py console.print line --- infrahub_sdk/ctl/commands/update.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py index 5d026af7..2beb76ce 100644 --- a/infrahub_sdk/ctl/commands/update.py +++ b/infrahub_sdk/ctl/commands/update.py @@ -65,8 +65,9 @@ async def update_command( branch=branch, ) elif file: - console.print("[dim]Note: KIND and IDENTIFIER are ignored in --file mode; " - "the file defines target objects.[/dim]") + console.print( + "[dim]Note: KIND and IDENTIFIER are 
ignored in --file mode; the file defines target objects.[/dim]" + ) await _update_with_file( client=client, file=file,