diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index ada692dcd..6eb933a35 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -10,6 +10,8 @@ jobs: os: [ubuntu-latest] steps: - uses: actions/checkout@v4 + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: diff --git a/.github/workflows/docs_latest.yml b/.github/workflows/docs_latest.yml index 70db34c57..7e8e50667 100644 --- a/.github/workflows/docs_latest.yml +++ b/.github/workflows/docs_latest.yml @@ -44,8 +44,6 @@ jobs: # Note: The sphinx action below can only install a single requirements file. - name: Build JSON Schemas run: tox -e generate_json_schemas - env: - TARGET_VERSION: ${{ env.REF_NAME }} - name: Build BO4E package # Note: This step necessary to correctly set the version in the JSON-Schema-links run: | diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index c205e7d37..c2e3ca6aa 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -51,8 +51,11 @@ jobs: TARGET_VERSION: ${{ github.ref_name }} - name: Check version tag run: | - python -m docs.compatibility.versioning --gh-version ${{ github.ref_name }} \ - --gh-token ${{ secrets.GITHUB_TOKEN }} + # Get last version tag + LAST_VERSION=$(bo4e repo versions -qcn 1 -r ${{ github.ref_name }}) + bo4e pull -t $LAST_VERSION -o tmp/last_schemas + bo4e diff schemas tmp/last_schemas json_schemas -o tmp/diff.json + bo4e diff version-bump tmp/diff.json -aq json_schemas: name: Generate JSON-Schemas runs-on: ubuntu-latest @@ -70,7 +73,10 @@ jobs: python -m pip install --upgrade pip pip install tox - name: Build JSON Schemas - run: tox -e generate_json_schemas + run: | + tox -e generate_json_schemas + # Remove .version file as we don't want to commit it to BO4E-Schemas + rm json_schemas/.version env: TARGET_VERSION: ${{ github.ref_name }} - name: Push schemas to 
BO4E-Schemas repo diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 37df56b5e..e2a29a07f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -10,6 +10,8 @@ jobs: os: [ubuntu-latest] steps: - uses: actions/checkout@v4 + with: + fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: diff --git a/.gitignore b/.gitignore index 08c6ec525..8de6ad354 100644 --- a/.gitignore +++ b/.gitignore @@ -175,6 +175,6 @@ src/_bo4e_python_version.py # the autogenerated JSON schemas will be build and pushed to BO4E-Schemas # on release -json_schemas/**/*.json +json_schemas/ tmp/ diff --git a/dev_requirements/requirements-coverage.txt b/dev_requirements/requirements-coverage.txt index 028be4240..070e19e5f 100644 --- a/dev_requirements/requirements-coverage.txt +++ b/dev_requirements/requirements-coverage.txt @@ -1,9 +1,8 @@ -# SHA1:6dafbcf610e9f81897b65ee9142715ab2e793f9e # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile '.\dev_requirements\requirements-coverage.in' # coverage==7.9.2 - # via -r requirements-coverage.in + # via -r .\dev_requirements\requirements-coverage.in diff --git a/dev_requirements/requirements-formatting.txt b/dev_requirements/requirements-formatting.txt index 50460e9c0..96ad317e6 100644 --- a/dev_requirements/requirements-formatting.txt +++ b/dev_requirements/requirements-formatting.txt @@ -1,21 +1,22 @@ -# SHA1:2c7ffcd29222de3114c7f7994911f1b69d06b6b3 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile '.\dev_requirements\requirements-formatting.in' # black==25.1.0 - # via -r dev_requirements/requirements-formatting.in -click==8.1.8 + # via -r 
.\dev_requirements\requirements-formatting.in +click==8.2.1 # via black +colorama==0.4.6 + # via click isort==6.0.1 - # via -r dev_requirements/requirements-formatting.in -mypy-extensions==1.0.0 + # via -r .\dev_requirements\requirements-formatting.in +mypy-extensions==1.1.0 # via black -packaging==24.0 +packaging==25.0 # via black pathspec==0.12.1 # via black -platformdirs==4.2.0 +platformdirs==4.3.8 # via black diff --git a/dev_requirements/requirements-json_schemas.in b/dev_requirements/requirements-json_schemas.in index dca9a9096..dd8022518 100644 --- a/dev_requirements/requirements-json_schemas.in +++ b/dev_requirements/requirements-json_schemas.in @@ -1 +1,2 @@ click +bo4e-cli diff --git a/dev_requirements/requirements-json_schemas.txt b/dev_requirements/requirements-json_schemas.txt index 4ae56804b..1ffe3e5af 100644 --- a/dev_requirements/requirements-json_schemas.txt +++ b/dev_requirements/requirements-json_schemas.txt @@ -1,9 +1,140 @@ -# SHA1:b93ec56608fb302fea7a8019d7c0e8e7239fc033 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile '.\dev_requirements\requirements-json_schemas.in' # -click==8.1.8 - # via -r dev_requirements/requirements-json_schemas.in +annotated-types==0.7.0 + # via pydantic +anyio==4.9.0 + # via httpx +argcomplete==3.6.2 + # via datamodel-code-generator +autoflake==2.3.1 + # via bo4e-cli +black==25.1.0 + # via datamodel-code-generator +bo4e-cli==0.0.3 + # via -r .\dev_requirements\requirements-json_schemas.in +certifi==2025.7.9 + # via + # httpcore + # httpx + # requests +cffi==1.17.1 + # via + # cryptography + # pynacl +charset-normalizer==3.4.2 + # via requests +click==8.2.1 + # via + # -r .\dev_requirements\requirements-json_schemas.in + # black + # typer +colorama==0.4.6 + # via click +cryptography==45.0.5 + # via pyjwt +datamodel-code-generator==0.31.2 + # via bo4e-cli 
+deprecated==1.2.18 + # via pygithub +genson==1.3.0 + # via datamodel-code-generator +greenlet==3.2.3 + # via sqlalchemy +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via bo4e-cli +idna==3.10 + # via + # anyio + # httpx + # requests +inflect==7.5.0 + # via datamodel-code-generator +isort==6.0.1 + # via datamodel-code-generator +jinja2==3.1.6 + # via datamodel-code-generator +markdown-it-py==3.0.0 + # via rich +markupsafe==3.0.2 + # via jinja2 +mdurl==0.1.2 + # via markdown-it-py +more-itertools==10.7.0 + # via + # bo4e-cli + # inflect +mypy-extensions==1.1.0 + # via black +networkx==3.5 + # via bo4e-cli +packaging==25.0 + # via + # black + # datamodel-code-generator +pathspec==0.12.1 + # via black +platformdirs==4.3.8 + # via black +pycparser==2.22 + # via cffi +pydantic==2.11.7 + # via + # bo4e-cli + # datamodel-code-generator + # sqlmodel +pydantic-core==2.33.2 + # via pydantic +pyflakes==3.4.0 + # via autoflake +pygithub==2.6.1 + # via bo4e-cli +pygments==2.19.2 + # via rich +pyjwt[crypto]==2.10.1 + # via pygithub +pynacl==1.5.0 + # via pygithub +pyyaml==6.0.2 + # via datamodel-code-generator +requests==2.32.4 + # via pygithub +rich==14.0.0 + # via typer +shellingham==1.5.4 + # via typer +sniffio==1.3.1 + # via anyio +sqlalchemy==2.0.41 + # via sqlmodel +sqlmodel==0.0.24 + # via bo4e-cli +typeguard==4.4.4 + # via inflect +typer==0.16.0 + # via bo4e-cli +typing-extensions==4.14.1 + # via + # anyio + # pydantic + # pydantic-core + # pygithub + # sqlalchemy + # typeguard + # typer + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 + # via + # pygithub + # requests +wrapt==1.17.2 + # via deprecated diff --git a/dev_requirements/requirements-linting.txt b/dev_requirements/requirements-linting.txt index f8248bb6a..1d61b2f13 100644 --- a/dev_requirements/requirements-linting.txt +++ b/dev_requirements/requirements-linting.txt @@ -1,21 +1,22 @@ -# SHA1:0e15f8789b9d62fe90d1f1b0b6a7e32f13b99b19 # -# This file 
is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile '.\dev_requirements\requirements-linting.in' # -astroid==3.3.9 +astroid==3.3.10 # via pylint -dill==0.3.8 +colorama==0.4.6 + # via pylint +dill==0.4.0 # via pylint isort==6.0.1 # via pylint mccabe==0.7.0 # via pylint -platformdirs==4.2.0 +platformdirs==4.3.8 # via pylint pylint==3.3.7 - # via -r requirements-linting.in -tomlkit==0.12.4 + # via -r .\dev_requirements\requirements-linting.in +tomlkit==0.13.3 # via pylint diff --git a/dev_requirements/requirements-packaging.txt b/dev_requirements/requirements-packaging.txt index 29432a962..7039d1a2a 100644 --- a/dev_requirements/requirements-packaging.txt +++ b/dev_requirements/requirements-packaging.txt @@ -1,63 +1,53 @@ # -# This file is autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile dev_requirements/requirements-packaging.in +# pip-compile '.\dev_requirements\requirements-packaging.in' # -backports-tarfile==1.2.0 - # via jaraco-context build==1.2.2.post1 - # via -r requirements-packaging.in -certifi==2024.12.14 + # via -r .\dev_requirements\requirements-packaging.in +certifi==2025.7.9 # via requests -cffi==1.17.1 - # via cryptography -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests -cryptography==45.0.4 - # via secretstorage +colorama==0.4.6 + # via build docutils==0.21.2 # via readme-renderer id==1.5.0 # via twine idna==3.10 # via requests -importlib-metadata==8.6.1 - # via keyring jaraco-classes==3.4.0 # via keyring jaraco-context==6.0.1 # via keyring -jaraco-functools==4.1.0 +jaraco-functools==4.2.1 # via keyring -jeepney==0.9.0 - # via - # keyring - # secretstorage keyring==25.6.0 # via twine markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -more-itertools==10.6.0 +more-itertools==10.7.0 
# via # jaraco-classes # jaraco-functools -nh3==0.2.20 +nh3==0.2.21 # via readme-renderer -packaging==24.2 +packaging==25.0 # via # build # twine -pycparser==2.22 - # via cffi -pygments==2.19.1 +pygments==2.19.2 # via # readme-renderer # rich pyproject-hooks==1.2.0 # via build +pywin32-ctypes==0.2.3 + # via keyring readme-renderer==44.0 # via twine requests==2.32.4 @@ -69,15 +59,11 @@ requests-toolbelt==1.0.0 # via twine rfc3986==2.0.0 # via twine -rich==13.9.4 +rich==14.0.0 # via twine -secretstorage==3.3.3 - # via keyring twine==6.1.0 - # via -r requirements-packaging.in -urllib3==2.3.0 + # via -r .\dev_requirements\requirements-packaging.in +urllib3==2.5.0 # via # requests # twine -zipp==3.21.0 - # via importlib-metadata diff --git a/dev_requirements/requirements-tests.txt b/dev_requirements/requirements-tests.txt index 94ee9a7ab..a40f5b3b6 100644 --- a/dev_requirements/requirements-tests.txt +++ b/dev_requirements/requirements-tests.txt @@ -1,21 +1,24 @@ -# SHA1:d8bf34f7830b949c538f22e73d2b6ebb2065a573 # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile '.\dev_requirements\requirements-tests.in' # +colorama==0.4.6 + # via pytest dictdiffer==0.9.0 - # via -r requirements-tests.in -iniconfig==2.0.0 + # via -r .\dev_requirements\requirements-tests.in +iniconfig==2.1.0 + # via pytest +packaging==25.0 # via pytest -packaging==24.0 +pluggy==1.6.0 # via pytest -pluggy==1.5.0 +pygments==2.19.2 # via pytest -pytest==8.3.5 +pytest==8.4.1 # via - # -r requirements-tests.in + # -r .\dev_requirements\requirements-tests.in # pytest-datafiles pytest-datafiles==3.0.0 - # via -r requirements-tests.in + # via -r .\dev_requirements\requirements-tests.in diff --git a/dev_requirements/requirements-type_check.txt b/dev_requirements/requirements-type_check.txt index c662bbbec..16ade181d 100644 --- 
a/dev_requirements/requirements-type_check.txt +++ b/dev_requirements/requirements-type_check.txt @@ -1,31 +1,34 @@ -# SHA1:9a286c239b1bb587f83dac5fba3e8a2ebfa4029b # -# This file is autogenerated by pip-compile-multi -# To update, run: +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: # -# pip-compile-multi +# pip-compile '.\dev_requirements\requirements-type_check.in' # -iniconfig==2.0.0 +colorama==0.4.6 + # via pytest +iniconfig==2.1.0 # via pytest mypy==1.17.0 # via -r requirements-type_check.in -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy -networkx==3.4.2 +networkx==3.5 # via networkx-stubs networkx-stubs==0.0.1 - # via -r requirements-type_check.in -packaging==24.0 + # via -r .\dev_requirements\requirements-type_check.in +packaging==25.0 # via pytest pathspec==0.12.1 # via mypy -pluggy==1.5.0 +pluggy==1.6.0 # via pytest -pytest==8.3.5 - # via -r requirements-type_check.in +pygments==2.19.2 + # via pytest +pytest==8.4.1 + # via -r .\dev_requirements\requirements-type_check.in types-requests==2.32.4.20250611 - # via -r requirements-type_check.in -typing-extensions==4.11.0 + # via -r .\dev_requirements\requirements-type_check.in +typing-extensions==4.14.1 # via mypy -urllib3==2.2.2 +urllib3==2.5.0 # via types-requests diff --git a/docs/changelog.rst b/docs/changelog.rst index 66a680723..cc51e8bc1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -4,38 +4,45 @@ Changelog ========= -You can find the changelog on the ` GitHub Release page`_. +You can find the changelog on the `GitHub Release page `_. ============= Compatibility ============= The table below shows the compatibility matrix of the last BO4E versions. -You can also download the compatibility matrix as CSV file `here <_static/tables/compatibility_matrix.csv>`_. 
- -Legend: - -+------+------------------------------------------------------+ -| 🟢 | | Compatible | -| | | No changes in the data model | -+------+------------------------------------------------------+ -| 🟡 | | Compatible | -| | | Only non-critical changes in the data model | -| | | e.g. added fields, changed doc strings | -+------+------------------------------------------------------+ -| 🔴 | | Incompatible | -| | | Critical changes in the data model | -| | | e.g. removed fields, changed types | -+------+------------------------------------------------------+ -| ➕ | | Compatible | -| | | Data model was added in this version | -+------+------------------------------------------------------+ -| ➖ | | Incompatible | -| | | Data model was removed in this version | -+------+------------------------------------------------------+ -| \- | | Data model not existent in this version | -| | | was removed before or will be added in future | -+------+------------------------------------------------------+ +You can also download the compatibility matrix as CSV file `here <_static/tables/compatibility_matrix.csv>`__. + +.. csv-table:: Diff-files for the version diffs shown in the compatibility matrix + :file: _static/tables/changes_table.csv + :header-rows: 1 + +.. table:: Legend for compatibility matrix + :widths: 10 90 + + +------+------------------------------------------------------+ + | Icon | Description | + +======+======================================================+ + | 🟢 | Compatible | + | | No changes in the data model | + +------+------------------------------------------------------+ + | 🟡 | Compatible | + | | Only non-critical changes in the data model | + | | e.g. added fields, changed doc strings | + +------+------------------------------------------------------+ + | 🔴 | Incompatible | + | | Critical changes in the data model | + | | e.g. 
removed fields, changed types | + +------+------------------------------------------------------+ + | ➕ | Compatible | + | | Data model was added in this version | + +------+------------------------------------------------------+ + | ➖ | Incompatible | + | | Data model was removed in this version | + +------+------------------------------------------------------+ + | \- | Data model not existent in this version | + | | was removed before or will be added in future | + +------+------------------------------------------------------+ .. csv-table:: Compatibility matrix :file: _static/tables/compatibility_matrix.csv diff --git a/docs/compatibility/__init__.py b/docs/compatibility/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/compatibility/__main__.py b/docs/compatibility/__main__.py deleted file mode 100644 index 0550def62..000000000 --- a/docs/compatibility/__main__.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -This module provides functions to compare the BO4E JSON schemas of different versions. -It also contains functions to query GitHub for the latest BO4E versions to compare with the schemas of the current -work tree. -Additionally, it implements a little cache functionality to avoid multiple downloads of the same versions e.g. -if you're testing locally. -""" - -import logging -import re -from pathlib import Path - -import bost.operations -from bost.pull import OWNER, REPO - -from . 
import diff, loader, matrix, versioning - -logger = logging.getLogger(__name__) - -REGEX_RELEASE_VERSION = re.compile(r"^v(\d{6}\.\d+\.\d+)$") -REGEX_RELEASE_CANDIDATE_VERSION = re.compile(r"^v(\d{6}\.\d+\.\d+)-rc\d+$") - - -def _monkey_patch_bost_regex_if_local_testing(version: str) -> None: - regex_expected_version = re.compile(r"^v\d+\.\d+\.\d+(?:-rc\d+)?$") - if not regex_expected_version.fullmatch(version): - bost.operations.REF_ONLINE_REGEX = re.compile( - rf"^https://raw\.githubusercontent\.com/(?:{OWNER.upper()}|{OWNER.lower()}|Hochfrequenz)/{REPO}/" - rf"(?P[^/]+)/" - r"src/bo4e_schemas/(?P(?:\w+/)*)(?P\w+)\.json#?$" - ) - - -def create_tables_for_doc( - compatibility_matrix_output_file: Path, - gh_version: str, - *, - gh_token: str | None = None, - last_n_versions: int = 2, -) -> None: - """ - Creates the compatibility matrix for the documentation. The output is a csv file. This can be referenced - inside Sphinx documentation. See https://sublime-and-sphinx-guide.readthedocs.io/en/latest/tables.html#csv-files - for more information. - If you have problems with rate limiting, please set gh_token. - The compatibility matrix will be built for last_n_versions + the current version in the checkout working directory. - If you set last_n_versions = 0 all versions since v202401.0.0 will be compared. - Note: The matrix will never contain the first version as column. Each column is a comparison to the version before. - Note: Only functional releases will be compared since technical releases are enforced to be fully compatible. 
- See https://github.com/bo4e/BO4E-python/issues/784 - """ - _monkey_patch_bost_regex_if_local_testing(gh_version) - logger.info("Retrieving the last %d release versions", last_n_versions) - versions = list( - reversed(list(versioning.get_last_n_tags(last_n_versions, ref=gh_version, exclude_technical_bumps=True))) - ) - logger.info("Comparing versions iteratively: %s", " -> ".join([*versions, gh_version])) - changes_iterables = diff.compare_bo4e_versions_iteratively(versions, gh_version, gh_token=gh_token) - logger.info("Building namespaces") - changes = {key: list(value) for key, value in changes_iterables.items()} - namespaces = {version: list(loader.get_namespace(loader.BO4E_BASE_DIR / version)) for version in versions} - namespaces[gh_version] = list(loader.get_namespace(loader.BO4E_BASE_DIR / gh_version)) - logger.info("Creating compatibility matrix") - matrix.create_compatibility_matrix_csv( - compatibility_matrix_output_file, [*versions, gh_version], namespaces, changes - ) - - -def test_create_tables_for_doc() -> None: - """ - Test the create_tables_for_doc function locally without building the entire documentation. - Needs the JSON schemas to be present in /json_schemas with TARGET_VERSION set to "local". - """ - create_tables_for_doc( - Path(__file__).parents[1] / "compatibility_matrix.csv", - "local", - last_n_versions=0, - ) diff --git a/docs/compatibility/change_schemas.py b/docs/compatibility/change_schemas.py deleted file mode 100644 index b7aa54928..000000000 --- a/docs/compatibility/change_schemas.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Contains the classes to model changes between two BO4E versions. -""" - -from enum import StrEnum -from typing import Any, Iterable - -from pydantic import BaseModel - - -class ChangeType(StrEnum): - """ - This enum class lists the different types of changes of a single change between two BO4E versions. 
- """ - - FIELD_ADDED = "field_added" - FIELD_REMOVED = "field_removed" - FIELD_DEFAULT_CHANGED = "field_default_changed" - FIELD_DESCRIPTION_CHANGED = "field_description_changed" - FIELD_TITLE_CHANGED = "field_title_changed" - # field type change types - FIELD_CARDINALITY_CHANGED = "field_cardinality_changed" - FIELD_REFERENCE_CHANGED = "field_reference_changed" - FIELD_STRING_FORMAT_CHANGED = "field_string_format_changed" - FIELD_ANY_OF_TYPE_ADDED = "field_any_of_type_added" - FIELD_ANY_OF_TYPE_REMOVED = "field_any_of_type_removed" - FIELD_ALL_OF_TYPE_ADDED = "field_all_of_type_added" - FIELD_ALL_OF_TYPE_REMOVED = "field_all_of_type_removed" - FIELD_TYPE_CHANGED = "field_type_changed" # An arbitrary unclassified change in type - - CLASS_ADDED = "class_added" - CLASS_REMOVED = "class_removed" - CLASS_DESCRIPTION_CHANGED = "class_description_changed" - - ENUM_VALUE_ADDED = "enum_value_added" - ENUM_VALUE_REMOVED = "enum_value_removed" - - -class Change(BaseModel): - """ - This pydantic class models a single change between two BO4E versions. - """ - - type: ChangeType - old: Any - new: Any - old_trace: str - new_trace: str - - def __str__(self) -> str: - return f"{self.type}: {self.old} -> {self.new}" - - -def is_change_critical(change: Change) -> bool: - """ - This function checks if a change is critical i.e. if the new value is incompatible to the old value. - """ - return change.type in ( - ChangeType.FIELD_REMOVED, - ChangeType.FIELD_TYPE_CHANGED, - ChangeType.FIELD_CARDINALITY_CHANGED, - ChangeType.FIELD_REFERENCE_CHANGED, - ChangeType.FIELD_STRING_FORMAT_CHANGED, - ChangeType.FIELD_ANY_OF_TYPE_ADDED, - ChangeType.FIELD_ANY_OF_TYPE_REMOVED, - ChangeType.FIELD_ALL_OF_TYPE_ADDED, - ChangeType.FIELD_ALL_OF_TYPE_REMOVED, - ChangeType.CLASS_REMOVED, - ChangeType.ENUM_VALUE_REMOVED, - ) - - -def filter_non_crit(changes: Iterable[Change]) -> Iterable[Change]: - """ - This function filters out all non-critical changes. 
- """ - return (change for change in changes if is_change_critical(change)) diff --git a/docs/compatibility/diff.py b/docs/compatibility/diff.py deleted file mode 100644 index f54bbaf02..000000000 --- a/docs/compatibility/diff.py +++ /dev/null @@ -1,382 +0,0 @@ -""" -Contains the logic to detect the different changes between two BO4E versions. -""" - -import itertools -import logging -import re -from pathlib import Path -from typing import Iterable, Sequence - -from bost.schema import AllOf, AnyOf, Array, Object, Reference, SchemaRootType, SchemaType, StrEnum, String, TypeBase - -from . import change_schemas, loader - -logger = logging.getLogger(__name__) - -REGEX_IGNORE_VERSION = re.compile(r"v\d+\.\d+\.\d+(-rc\d+)?") - - -def _diff_type_base( - schema_old: TypeBase, schema_new: TypeBase, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two type base schemas and yields the changes. - """ - if schema_old.title != schema_new.title: - logger.warning( - ( - "Title should not change. 
Renaming is not detectable and the titles are autogenerated.\n" - "'%s' -> '%s'\n" - "%s -> %s" - ), - schema_old.title, - schema_new.title, - old_trace, - new_trace, - ) - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_TITLE_CHANGED, - old=schema_old.title, - new=schema_new.title, - old_trace=old_trace, - new_trace=new_trace, - ) - if REGEX_IGNORE_VERSION.sub(schema_old.description, "{__gh_version__}") != REGEX_IGNORE_VERSION.sub( - schema_new.description, "{__gh_version__}" - ): - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_DESCRIPTION_CHANGED, - old=schema_old.description, - new=schema_new.description, - old_trace=old_trace, - new_trace=new_trace, - ) - if schema_old.default != schema_new.default and schema_old.title != " Version" and schema_new.title != " Version": - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_DEFAULT_CHANGED, - old=schema_old.default, - new=schema_new.default, - old_trace=old_trace, - new_trace=new_trace, - ) - - -def _diff_enum_schemas( - schema_old: StrEnum, schema_new: StrEnum, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two enum schemas and yields the changes. 
- """ - new_enum_values = set(schema_new.enum) - for old_enum_value in schema_old.enum: - if old_enum_value not in new_enum_values: - yield change_schemas.Change( - type=change_schemas.ChangeType.ENUM_VALUE_REMOVED, - old=old_enum_value, - new=None, - old_trace=old_trace, - new_trace=new_trace, - ) - else: - new_enum_values.remove(old_enum_value) - for new_enum_value in new_enum_values: - yield change_schemas.Change( - type=change_schemas.ChangeType.ENUM_VALUE_ADDED, - old=None, - new=new_enum_value, - old_trace=old_trace, - new_trace=new_trace, - ) - - -def _diff_object_schemas( - schema_old: Object, schema_new: Object, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two object schemas and yields the changes. - """ - new_properties = set(schema_new.properties.keys()) - for key, value in schema_old.properties.items(): - if key not in schema_new.properties: - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_REMOVED, - old=value, - new=None, - old_trace=f"{old_trace}.properties['{key}']", - new_trace=new_trace, - ) - else: - new_properties.remove(key) - - # Field exists in both schemas, check for further changes - yield from _diff_schema_type( - value, - schema_new.properties[key], - f"{old_trace}.properties['{key}']", - f"{new_trace}.properties['{key}']", - ) - for key in new_properties: - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_ADDED, - old=None, - new=schema_new.properties[key], - old_trace=old_trace, - new_trace=f"{new_trace}.properties['{key}']", - ) - - -def _diff_ref_schemas( - schema_old: Reference, schema_new: Reference, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two reference schemas and yields a change if the references are different. 
- Even if the referenced schema only got renamed or moved, the reference will be treated as different - because in any client application you would have to update the references. - """ - if schema_old.ref != schema_new.ref: - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_REFERENCE_CHANGED, - old=schema_old.ref, - new=schema_new.ref, - old_trace=old_trace, - new_trace=new_trace, - ) - - -def _diff_array_schemas( - schema_old: Array, schema_new: Array, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two array schemas and yields the changes. - """ - yield from _diff_schema_type(schema_old.items, schema_new.items, f"{old_trace}.items", f"{new_trace}.items") - - -def _diff_any_of_or_all_of_schemas( - schema_old: AnyOf | AllOf, schema_new: AnyOf | AllOf, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two anyOf or allOf schemas and yields the changes. - """ - assert type(schema_old) is type(schema_new), "Internal error: This function should only be called for equal types" - if isinstance(schema_old, AnyOf): - query_str = "any_of" - else: - query_str = "all_of" - found_new_types = set() - for old_index, old_type in enumerate(getattr(schema_old, query_str)): - found_old_in_new = False - for new_index, new_type in enumerate(getattr(schema_new, query_str)): - changes = list( - _diff_schema_type( - old_type, new_type, f"{old_trace}.{query_str}[{old_index}]", f"{new_trace}.{query_str}[{new_index}]" - ) - ) - if not any(change_schemas.filter_non_crit(changes)): - # The types are equal (except for non-critical changes), yield the non-critical changes - found_old_in_new = True - assert new_index not in found_new_types, "Internal error: Duplicate type in anyOf" - found_new_types.add(new_index) - yield from changes - break - if not found_old_in_new: - yield change_schemas.Change( - type=( - change_schemas.ChangeType.FIELD_ANY_OF_TYPE_REMOVED - if 
isinstance(schema_old, AnyOf) - else change_schemas.ChangeType.FIELD_ALL_OF_TYPE_REMOVED - ), - old=old_type, - new=None, - old_trace=f"{old_trace}.{query_str}[{old_index}]", - new_trace=f"{new_trace}.{query_str}", - ) - not_found_indices = set(range(len(getattr(schema_new, query_str)))) - found_new_types - for new_index in not_found_indices: - yield change_schemas.Change( - type=( - change_schemas.ChangeType.FIELD_ANY_OF_TYPE_ADDED - if isinstance(schema_old, AnyOf) - else change_schemas.ChangeType.FIELD_ALL_OF_TYPE_ADDED - ), - old=None, - new=getattr(schema_new, query_str)[new_index], - old_trace=old_trace, - new_trace=f"{new_trace}.{query_str}[{new_index}]", - ) - - -def _diff_string_schemas( - schema_old: String, schema_new: String, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two string schemas and yields the changes. - """ - if schema_old.format != schema_new.format: - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_STRING_FORMAT_CHANGED, - old=schema_old.format, - new=schema_new.format, - old_trace=old_trace, - new_trace=new_trace, - ) - - -def _diff_schema_differing_types( - schema_old: SchemaType, schema_new: SchemaType, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two differing schema types and yields the changes. - """ - assert type(schema_old) is not type( - schema_new - ), "Internal error: This function should only be called for differing types" - # Types are different. 
Check if it is "only" a change in cardinality - if isinstance(schema_old, Object) and isinstance(schema_new, Array): - sub_changes = list(_diff_schema_type(schema_old, schema_new.items, old_trace, f"{new_trace}.items")) - elif isinstance(schema_old, Array) and isinstance(schema_new, Object): - sub_changes = list(_diff_schema_type(schema_old.items, schema_new, f"{old_trace}.items", new_trace)) - else: - sub_changes = None - - if sub_changes is None or any(change_schemas.filter_non_crit(sub_changes)): - # Treat the types as equal iff there are no critical changes between the types - # In if-Block, the types are different - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_TYPE_CHANGED, - old=schema_old, - new=schema_new, - old_trace=old_trace, - new_trace=new_trace, - ) - else: - # If the types are equal (except for non-critical changes), yield the non-critical changes - # plus a change in cardinality - yield from sub_changes - # If the type of one schema is equal to the items type of the other, there is a change in cardinality - yield change_schemas.Change( - type=change_schemas.ChangeType.FIELD_CARDINALITY_CHANGED, - old=schema_old, - new=schema_new, - old_trace=old_trace, - new_trace=new_trace, - ) - - -def _diff_schema_type( - schema_old: SchemaType, schema_new: SchemaType, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two schema types and yields the changes. - """ - yield from _diff_type_base(schema_old, schema_new, old_trace, new_trace) - if type(schema_old) is not type(schema_new): - yield from _diff_schema_differing_types(schema_old, schema_new, old_trace, new_trace) - # Even if the types are equal on this shallow level, we must do some more checks for certain - # types. 
- elif isinstance(schema_new, StrEnum): - yield from _diff_enum_schemas(schema_old, schema_new, old_trace, new_trace) # type: ignore[arg-type] - # mypy isn't able to know that type(schema_new) is type(schema_old) here (and in the following) - elif isinstance(schema_new, Object): - yield from _diff_object_schemas(schema_old, schema_new, old_trace, new_trace) # type: ignore[arg-type] - elif isinstance(schema_new, Reference): - yield from _diff_ref_schemas(schema_old, schema_new, old_trace, new_trace) # type: ignore[arg-type] - elif isinstance(schema_new, Array): - yield from _diff_array_schemas(schema_old, schema_new, old_trace, new_trace) # type: ignore[arg-type] - elif isinstance(schema_new, (AnyOf, AllOf)): - yield from _diff_any_of_or_all_of_schemas( - schema_old, # type: ignore[arg-type] - schema_new, - old_trace, - new_trace, - ) - # Any other types are definitely equal at this point - - -def _diff_root_schemas( - schema_old: SchemaRootType, schema_new: SchemaRootType, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two root schemas and yields the changes. - """ - yield from _diff_schema_type(schema_old, schema_new, old_trace, new_trace) - - -def diff_schemas( - schemas_old: Path, schemas_new: Path, old_trace: str, new_trace: str -) -> Iterable[change_schemas.Change]: - """ - This function compares two BO4E versions and yields the changes. - Note: The paths to the old and the new schemas should correspond to the same root node of the tree structure. - I.e. the direct subdirectories should be "bo", "com" and "enum". 
- """ - old_schema_files = {file.relative_to(schemas_old) for file in schemas_old.rglob("*.json")} - new_schema_files = {file.relative_to(schemas_new) for file in schemas_new.rglob("*.json")} - - for schema_file in old_schema_files - new_schema_files: - yield change_schemas.Change( - type=change_schemas.ChangeType.CLASS_REMOVED, - old=loader.load_schema_file(schemas_old / schema_file), - new=None, - old_trace=f"{old_trace}/{'/'.join(schema_file.with_suffix('').parts)}#", - new_trace=f"{new_trace}/#", - ) - for schema_file in new_schema_files - old_schema_files: - yield change_schemas.Change( - type=change_schemas.ChangeType.CLASS_ADDED, - old=None, - new=loader.load_schema_file(schemas_new / schema_file), - old_trace=f"{old_trace}/#", - new_trace=f"{new_trace}/{'/'.join(schema_file.with_suffix('').parts)}#", - ) - for schema_file in old_schema_files & new_schema_files: - yield from _diff_root_schemas( - loader.load_schema_file(schemas_old / schema_file), - loader.load_schema_file(schemas_new / schema_file), - f"{old_trace}/{'/'.join(schema_file.with_suffix('').parts)}#", - f"{new_trace}/{'/'.join(schema_file.with_suffix('').parts)}#", - ) - - -def compare_bo4e_versions( - version_old: str, version_new: str, gh_token: str | None = None, from_local: bool = False -) -> Iterable[change_schemas.Change]: - """ - Compare the old version with the new version. - If version_new is None use the BO4E version of the checkout working directory by assuming the compiled json - schemas in /json_schemas. 
- """ - dir_old_schemas = loader.pull_or_reuse_bo4e_version(version_old, gh_token) - dir_new_schemas = loader.pull_or_reuse_bo4e_version(version_new, gh_token, from_local=from_local) - print(f"Comparing {version_old} with {version_new}") - yield from diff_schemas(dir_old_schemas, dir_new_schemas, version_old, version_new) - - -def compare_bo4e_versions_iteratively( - versions: Sequence[str], cur_version: str | None = None, gh_token: str | None = None -) -> dict[tuple[str, str], Iterable[change_schemas.Change]]: - """ - Compare the versions iteratively. Each version at index i will be compared to the version at index i+1. - Additionally, if cur_version is provided, the last version in the list will be compared to the version - in the checkout working directory. The value of cur_version will be used to set the key in the returned - dict. - Note: - - versions must contain at least one element. - - versions should be sorted in ascending order. - - if using cur_version, ensure that the json schemas of the checkout working directory - were build on beforehand. They should be located in /json_schemas. 
- """ - print(f"Comparing versions {versions} with cur_version {cur_version}") - if len(versions) == 0: - print("No versions to compare.") - return {} - changes = {} - last_version: str = versions[0] # This value is never used but makes mypy and pylint happy - for version_old, version_new in itertools.pairwise(versions): - last_version = version_new - changes[version_old, version_new] = compare_bo4e_versions(version_old, version_new, gh_token) - if cur_version is not None: - changes[last_version, cur_version] = compare_bo4e_versions(last_version, cur_version, gh_token, from_local=True) - print("Comparisons finished.") - return changes diff --git a/docs/compatibility/loader.py b/docs/compatibility/loader.py deleted file mode 100644 index 5e6651f94..000000000 --- a/docs/compatibility/loader.py +++ /dev/null @@ -1,114 +0,0 @@ -""" -Contains functions to load and save schema files and changes -""" - -import json -import shutil -from pathlib import Path -from typing import Iterable - -from bost import main as bost_main -from bost.operations import update_references as bost_update_references -from bost.pull import SchemaMetadata -from bost.schema import SchemaRootType -from pydantic import TypeAdapter - -from . import change_schemas - -BO4E_BASE_DIR = Path(__file__).parents[2] / "tmp/bo4e_json_schemas" -LOCAL_JSON_SCHEMA_DIR = Path(__file__).parents[2] / "json_schemas" - - -def load_schema_file(path: Path) -> SchemaRootType: - """ - Load a schema file and return the parsed schema - """ - return TypeAdapter(SchemaRootType).validate_json(path.read_text("utf-8")) - # mypy has problems to infer the Union type here. 
- - -def load_changes(path: Path) -> list[change_schemas.Change]: - """ - Load a changes file and return the parsed changes - """ - return TypeAdapter(list[change_schemas.Change]).validate_json(path.read_text("utf-8")) - - -def save_changes(path: Path, changes: Iterable[change_schemas.Change]) -> None: - """ - Save the changes to a file - """ - with open(path, "w", encoding="utf-8") as file: - json.dump( - TypeAdapter(list[change_schemas.Change]).dump_python(list(changes), mode="json"), - file, - ) - - -def get_namespace(path: Path) -> Iterable[tuple[str, ...]]: - """ - Get the namespace from a file - """ - for schema_file in path.rglob("*.json"): - sub_path = schema_file.relative_to(path).parts[:-1] - yield *sub_path, schema_file.stem - - -def pull_bo4e_version(version: str, output: Path, gh_token: str | None = None) -> None: - """ - Pull the BO4E version from the given version string. - """ - bost_main( - output=output, - target_version=version, - update_refs=True, - set_default_version=False, - clear_output=True, - token=gh_token, - ) - - -def update_references(path: Path, version: str) -> None: - """ - Update the references in the given path. This step is needed for the local build. - """ - schema_namespace = {} - for schema_path in get_namespace(path): - local_path = Path(path, *schema_path).with_suffix(".json") - schema_namespace[schema_path[-1]] = SchemaMetadata( - class_name=schema_path[-1], - download_url="", - module_path=schema_path, - file_path=local_path, - cached_path=local_path, - token=None, - ) - for schema_metadata in schema_namespace.values(): - bost_update_references(schema_metadata, schema_namespace, version) - schema_metadata.save() - - -def pull_or_reuse_bo4e_version(version: str, gh_token: str | None = None, from_local: bool = False) -> Path: - """ - Pull the BO4E version from the given version string or reuse the version if it was already pulled before. 
- If version is None use the BO4E version of the checkout working directory by assuming the compiled json - schemas in /json_schemas. - Returns the path of the bo4e directory. - """ - bo4e_dir = BO4E_BASE_DIR / version - - if from_local: - if not any(LOCAL_JSON_SCHEMA_DIR.rglob("*.json")): - raise ValueError( - "No local json schemas found in /json_schemas. " - "Please ensure that the json schemas are build on beforehand." - ) - if bo4e_dir.exists(): - shutil.rmtree(bo4e_dir) - shutil.copytree(LOCAL_JSON_SCHEMA_DIR, bo4e_dir) - update_references(bo4e_dir, version) - elif any(bo4e_dir.rglob("*.json")): - return bo4e_dir - else: - pull_bo4e_version(version, bo4e_dir, gh_token) - return bo4e_dir diff --git a/docs/compatibility/matrix.py b/docs/compatibility/matrix.py deleted file mode 100644 index ba21373a5..000000000 --- a/docs/compatibility/matrix.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -This module contains the logic to create the compatibility matrix from a list of changes. -""" - -import csv -import itertools -from enum import StrEnum -from pathlib import Path -from typing import Any as _Any -from typing import Mapping, Sequence - -from . import change_schemas - - -class ChangeSymbol(StrEnum): - """ - This enum class lists the different symbols of changes in the compatibility matrix. - """ - - CHANGE_NONE = "🟢" - CHANGE_NON_CRITICAL = "🟡" - CHANGE_CRITICAL = "🔴" - NON_EXISTENT = "\\-" - ADDED = "➕" - REMOVED = "➖" - - -def determine_symbol( - changes: Sequence[change_schemas.Change], namespace: Sequence[tuple[str, ...]], cls: tuple[str, ...] -) -> ChangeSymbol: - """ - Determine the symbol of a change. 
- """ - if len(changes) == 1 and changes[0].type == change_schemas.ChangeType.CLASS_REMOVED: - return ChangeSymbol.REMOVED - if len(changes) == 1 and changes[0].type == change_schemas.ChangeType.CLASS_ADDED: - return ChangeSymbol.ADDED - if cls not in namespace: - return ChangeSymbol.NON_EXISTENT - if len(changes) == 0: - return ChangeSymbol.CHANGE_NONE - - assert all( - change.type not in (change_schemas.ChangeType.CLASS_ADDED, change_schemas.ChangeType.CLASS_REMOVED) - for change in changes - ), "Internal error: CLASS_ADDED and CLASS_REMOVED must be the only change per class if present." - if any(change_schemas.is_change_critical(change) for change in changes): - return ChangeSymbol.CHANGE_CRITICAL - return ChangeSymbol.CHANGE_NON_CRITICAL - - -def create_compatibility_matrix_csv( - output: Path, - versions: Sequence[str], - namespaces: Mapping[str, Sequence[tuple[str, ...]]], - changes: Mapping[tuple[str, str], Sequence[change_schemas.Change]], -) -> None: - """ - Create a compatibility matrix csv file from the given changes. 
- """ - output.parent.mkdir(parents=True, exist_ok=True) - with open(output, "w", encoding="utf-8") as file: - csv_writer = csv.writer(file, delimiter=",", lineterminator="\n", escapechar="/") - csv_writer.writerow(("", *versions[1:])) - all_classes: set[tuple[str, ...]] = set(itertools.chain.from_iterable(namespaces.values())) - - for class_path in sorted(all_classes, key=lambda cls: tuple(cls_part.lower() for cls_part in cls)): - row = [class_path[-1]] - class_path_str = "/".join(class_path) + "#" - for version_old, version_new in itertools.pairwise(versions): - changes_related_to_class = [ - change - for change in changes[(version_old, version_new)] - if change.old_trace.startswith(class_path_str) or change.new_trace.startswith(class_path_str) - ] - row.append(determine_symbol(changes_related_to_class, namespaces[version_new], class_path).value) - csv_writer.writerow(row) diff --git a/docs/compatibility/versioning.py b/docs/compatibility/versioning.py deleted file mode 100644 index acb62db82..000000000 --- a/docs/compatibility/versioning.py +++ /dev/null @@ -1,449 +0,0 @@ -""" -This module provides a CLI to check if a version tag has the expected format we expect in the BO4E repository. -""" - -import functools -import logging -import re -import subprocess -import sys -from typing import ClassVar, Iterable, Literal, Optional - -import click -from github import Github -from github.Auth import Token -from github.Repository import Repository -from more_itertools import one -from pydantic import BaseModel, ConfigDict - -from . import diff - -logger = logging.getLogger(__name__) - - -@functools.total_ordering -class Version(BaseModel): - """ - A class to represent a BO4E version number. 
- """ - - version_pattern: ClassVar[re.Pattern[str]] = re.compile( - r"^v(?P\d{6})\.(?P\d+)\.(?P\d+)(?:-rc(?P\d+))?$" - ) - - major: int - functional: int - technical: int - candidate: Optional[int] = None - model_config = ConfigDict(frozen=True) - - @classmethod - def from_string(cls, version: str, allow_candidate: bool = False) -> "Version": - """ - Parse a version string and return a Version object. - Raises a ValueError if the version string does not match the expected pattern. - Raises a ValueError if allow_candidate is False and the version string contains a candidate version. - """ - match = cls.version_pattern.fullmatch(version) - if match is None: - raise ValueError(f"Expected version to match {cls.version_pattern}, got {version}") - inst = cls( - major=int(match.group("major")), - functional=int(match.group("functional")), - technical=int(match.group("technical")), - candidate=int(match.group("candidate")) if match.group("candidate") is not None else None, - ) - if not allow_candidate and inst.is_candidate(): - raise ValueError(f"Expected a version without candidate, got a candidate version: {version}") - return inst - - @property - def tag_name(self) -> str: - """ - Return the tag name for this version. - """ - return f"v{self.major}.{self.functional}.{self.technical}" + ( - f"-rc{self.candidate}" if self.is_candidate() else "" - ) - - def is_candidate(self) -> bool: - """ - Return True if this version is a candidate version. - """ - return self.candidate is not None - - def bumped_major(self, other: "Version") -> bool: - """ - Return True if this version is a major bump from the other version. - """ - return self.major > other.major - - def bumped_functional(self, other: "Version") -> bool: - """ - Return True if this version is a functional bump from the other version. - Return False if major bump is detected. 
- """ - return not self.bumped_major(other) and self.functional > other.functional - - def bumped_technical(self, other: "Version") -> bool: - """ - Return True if this version is a technical bump from the other version. - Return False if major or functional bump is detected. - """ - return not self.bumped_functional(other) and self.technical > other.technical - - def bumped_candidate(self, other: "Version") -> bool: - """ - Return True if this version is a candidate bump from the other version. - Return False if major, functional or technical bump is detected. - Raises ValueError if one of the versions is not a candidate version. - """ - if self.candidate is None or other.candidate is None: - raise ValueError("Cannot compare candidate versions if one of them is not a candidate.") - return not self.bumped_technical(other) and self.candidate > other.candidate - - def __eq__(self, other: object) -> bool: - if isinstance(other, Version): - return super().__eq__(other) - if isinstance(other, str): - return str(self) == other - return NotImplemented - - def __lt__(self, other: "Version") -> bool: - """ - This method asks: Is this (self) version older than the other version? - """ - if not isinstance(other, Version): - return NotImplemented - for attr in ["major", "functional", "technical"]: - if getattr(self, attr) != getattr(other, attr): - return getattr(self, attr) < getattr(other, attr) - if self.candidate != other.candidate: - return self.candidate is not None and (other.candidate is None or self.candidate < other.candidate) - return False # self == other - - def __str__(self) -> str: - return self.tag_name - - -def get_source_repo(gh_token: str | None = None) -> Repository: - """ - Get the BO4E-python repository from GitHub. 
- """ - if gh_token is not None: - gh = Github(auth=Token(gh_token)) - else: - gh = Github() - return gh.get_repo("bo4e/BO4E-python") - - -def get_latest_version(gh_token: str | None = None) -> Version: - """ - Get the release from BO4E-python repository which is marked as 'latest'. - """ - return Version.from_string(get_source_repo(gh_token).get_latest_release().tag_name) - - -def is_version_tag(value: str) -> bool: - """ - Check if value is a valid version tag and exists in repository. - """ - try: - Version.from_string(value, allow_candidate=True) - subprocess.check_call(["git", "show-ref", "--quiet", f"refs/tags/{value}"]) - except (ValueError, subprocess.CalledProcessError): - return False - return True - - -def is_branch(value: str) -> bool: - """ - Check if a branch is a valid branch name and exists in repository. - """ - try: - subprocess.check_call(["git", "show-ref", "--quiet", f"refs/remotes/origin/{value}"]) - return True - except subprocess.CalledProcessError: - return False - - -def get_branches_containing_commit(commit_id: str) -> Iterable[str]: - """ - Get all branches containing the commit id. - If the commit id is not found, a subprocess.CalledProcessError will be raised. - If the commit exists but is not on any branch (e.g. only on tags), an empty Iterable will be returned. - """ - cmd = ["git", "branch", "-a", "--contains", commit_id] - output = subprocess.check_output(cmd).decode().strip() - if output.startswith("error: no such commit"): - raise subprocess.CalledProcessError(1, cmd, output=output) - return (line.strip().lstrip("*").lstrip() for line in output.splitlines()) - - -def is_commit(value: str) -> bool: - """ - Check if value is a valid commit id. - """ - try: - if re.fullmatch(r"^[0-9a-f]{40}$", value) is None: - return False - _ = get_branches_containing_commit(value) - # If the commit ID doesn't exist, an error will be raised. 
- except subprocess.CalledProcessError: - return False - return True - - -def get_checkout_commit_id() -> str: - """ - Get the commit id of the current checkout. - """ - return subprocess.check_output(["git", "rev-parse", "HEAD"]).decode().strip() - - -def _get_ref(ref: str) -> tuple[Literal["tag", "branch", "commit"], str]: - """ - Get the type of reference and the reference itself. - """ - if is_version_tag(ref): - logger.info("Get tags before tag %s", ref) - return "tag", ref - if is_branch(ref): - logger.info("Get tags on branch %s", ref) - return "branch", ref - if is_commit(ref): - logger.info("Get tags before commit %s", ref) - return "commit", ref - cur_commit = get_checkout_commit_id() - logger.info( - "Supplied value (%s) is neither a tag, a branch nor a commit. Get tags before current checkout commit %s", - ref, - cur_commit, - ) - return "commit", cur_commit - - -def get_last_n_tags( - n: int, *, ref: str = "main", exclude_candidates: bool = True, exclude_technical_bumps: bool = False -) -> Iterable[str]: - """ - Get the last n tags in chronological descending order starting from `ref`. - If `ref` is a branch, it will start from the current HEAD of the branch. - If `ref` is a tag, it will start from the tag itself. But the tag itself will not be included in the output. - If `ref` is neither nor, the main branch will be used as fallback. - If `exclude_candidates` is True, candidate versions will be excluded from the output. - If the number of found versions is less than `n`, a warning will be logged. - If n=0, all versions since v202401.0.0 will be taken into account. - If exclude_technical_bumps is True, from each functional release group, - the highest technical release will be returned. 
- """ - version_threshold = "v202401.0.0" # Is used if n=0 - ref_type, reference = _get_ref(ref) - if n == 0: - logger.info("Get all tags since %s", version_threshold) - else: - logger.info("Get the last %d tags", n) - - logger.info("%s release candidates", "Exclude" if exclude_candidates else "Include") - logger.info("%s technical bumps", "Exclude" if exclude_technical_bumps else "Include") - output = ( - subprocess.check_output(["git", "tag", "--merged", reference, "--sort=-creatordate"]) - .decode() - .strip() - .splitlines() - ) - if len(output) == 0: - logger.warning("No tags found.") - return - last_version = Version.from_string(output[0], allow_candidate=True) - - counter = 0 - stop_iteration = False - for ind, tag in enumerate(output): - if counter >= n > 0: - stop_iteration = True - if stop_iteration: - return - if n == 0 and tag == version_threshold: - stop_iteration = True - version = Version.from_string(tag, allow_candidate=True) - # pylint: disable=too-many-boolean-expressions - if ( - exclude_candidates - and version.is_candidate() - or exclude_technical_bumps - and ind > 0 - and not last_version.bumped_functional(version) - and not last_version.bumped_major(version) - or ind == 0 - and ref_type == "tag" - ): - logger.info("Skipping version %s", version) - continue - logger.info("Yielding version %s", version) - yield tag - last_version = version - counter += 1 - if counter < n and 0 < n: - if ref_type == "tag": - logger.warning("Only found %d tags before tag %s, tried to retrieve %d", counter, ref, n) - else: - logger.warning("Only found %d tags on branch %s, tried to retrieve %d", counter, ref, n) - if n == 0: - logger.warning("Threshold version %s not found. Returned all tags.", version_threshold) - - -def get_last_version_before(version: Version) -> Version: - """ - Get the last non-candidate version before the provided version following the commit history. 
- """ - return Version.from_string(one(get_last_n_tags(1, ref=version.tag_name))) - - -def ensure_latest_on_main(latest_version: Version, is_cur_version_latest: bool) -> None: - """ - Ensure that the latest release is on the main branch. - Will also be called if the currently tagged version is marked as `latest`. - In this case both versions are equal. - - Note: This doesn't revert the release on GitHub. If you accidentally released on the wrong branch, you have to - manually mark an old or create a new release as `latest` on the main branch. Otherwise, the publish workflow - will fail here. - """ - commit_id = subprocess.check_output(["git", "rev-parse", f"tags/{latest_version.tag_name}~0"]).decode().strip() - branches_containing_commit = get_branches_containing_commit(commit_id) - if "remotes/origin/main" not in branches_containing_commit: - if is_cur_version_latest: - raise ValueError( - f"Tagged version {latest_version} is marked as latest but is not on main branch " - f"(branches {branches_containing_commit} contain commit {commit_id}).\n" - "Either tag on main branch or don't mark the release as latest.\n" - "If you accidentally marked the release as latest please remember to revert it. " - "Otherwise, the next publish workflow will fail as the latest version is assumed to be on main." - ) - raise ValueError( - f"Fatal Error: Latest release {latest_version.tag_name} is not on main branch " - f"(branches {branches_containing_commit} contain commit {commit_id}).\n" - "Please ensure that the latest release is on the main branch." - ) - - -def compare_work_tree_with_latest_version( - gh_version: str, gh_token: str | None = None, major_bump_allowed: bool = True -) -> None: - """ - Compare the work tree with the latest release from the BO4E repository. - If any inconsistency is detected, a Value- or an AssertionError will be raised. 
- """ - logger.info("Github Access Token %s", "provided" if gh_token is not None else "not provided") - cur_version = Version.from_string(gh_version, allow_candidate=True) - logger.info("Tagged release version: %s", cur_version) - latest_version = get_latest_version(gh_token) - logger.info("Got latest release version from GitHub: %s", latest_version) - is_cur_version_latest = cur_version == latest_version - if is_cur_version_latest: - logger.info("Tagged version is marked as latest.") - ensure_latest_on_main(latest_version, is_cur_version_latest) - logger.info("Latest release is on main branch.") - - version_ahead = cur_version - version_behind = get_last_version_before(cur_version) - logger.info( - "Comparing with the version before the tagged release (excluding release candidates): %s", - version_behind, - ) - - assert version_ahead > version_behind, f"Version did not increase: {version_ahead} <= {version_behind}" - - logger.info( - "Current version is ahead of the compared version. Comparing versions: %s -> %s", - version_behind, - version_ahead, - ) - if version_ahead.bumped_major(version_behind): - if not major_bump_allowed: - raise ValueError("Major bump detected. Major bump is not allowed.") - logger.info("Major version bump detected. No further checks needed.") - return - changes = list( - diff.compare_bo4e_versions(version_behind.tag_name, version_ahead.tag_name, gh_token=gh_token, from_local=True) - ) - logger.info("Check if functional or technical release bump is needed") - functional_changes = len(changes) > 0 - logger.info("%s release bump is needed", "Functional" if functional_changes else "Technical") - - if not functional_changes and version_ahead.bumped_functional(version_behind): - raise ValueError( - "Functional version bump detected but no functional changes found. " - "Please bump the technical release count instead of the functional." 
- ) - if functional_changes and not version_ahead.bumped_functional(version_behind): - raise ValueError( - "No functional version bump detected but functional changes found. " - "Please bump the functional release count.\n" - f"Detected changes: {changes}" - ) - - -@click.command() -@click.option("--gh-version", type=str, required=True, help="The new version to compare the latest release with.") -@click.option( - "--gh-token", type=str, default=None, help="GitHub Access token. This helps to avoid rate limiting errors." -) -@click.option( - "--major-bump-allowed/--major-bump-disallowed", - is_flag=True, - default=True, - help="Indicate if a major bump is allowed. " - "If it is not allowed, the script will exit with an error if a major bump is detected.", -) -def compare_work_tree_with_latest_version_cli( - gh_version: str, gh_token: str | None = None, major_bump_allowed: bool = True -) -> None: - """ - Check a version tag and compare the work tree with the latest release from the BO4E repository. - Exits with status code 1 iff the version is inconsistent with the commit history or if the detected changes in - the JSON-schemas are inconsistent with the version bump. - """ - try: - compare_work_tree_with_latest_version(gh_version, gh_token, major_bump_allowed) - except Exception as error: - logger.error("An error occurred.", exc_info=error) - raise click.exceptions.Exit(1) - logger.info("All checks passed.") - - -if __name__ == "__main__": - # pylint: disable=no-value-for-parameter - compare_work_tree_with_latest_version_cli() - - -def test_compare_work_tree_with_latest_version() -> None: - """ - Little test function for local testing. - """ - logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) - compare_work_tree_with_latest_version("v202401.1.2-rc3", gh_token=None) - - -def test_version() -> None: - """ - Test the total ordering of the Version class. 
- """ - # pylint: disable=unnecessary-negation - assert Version.from_string("v202401.1.2") == Version(major=202401, functional=1, technical=2) - assert Version.from_string("v202401.1.2-rc3", allow_candidate=True) == Version( - major=202401, functional=1, technical=2, candidate=3 - ) - assert Version.from_string("v202401.1.2") < Version.from_string("v202401.1.3") - assert Version.from_string("v202401.1.2") < Version.from_string("v202401.2.0") - assert not Version.from_string("v202401.2.0") < Version.from_string("v202401.1.2") - assert Version.from_string("v202401.2.0") > Version.from_string("v202401.1.2") - assert Version.from_string("v202401.1.2-rc3", allow_candidate=True) < Version.from_string("v202401.1.2") - assert Version.from_string("v202401.1.2-rc3", allow_candidate=True) <= Version.from_string("v202401.1.2") - assert not Version.from_string("v202401.1.2-rc3", allow_candidate=True) >= Version.from_string("v202401.1.2") - assert Version.from_string("v202401.1.2-rc3", allow_candidate=True) > Version.from_string("v202401.1.1") - assert Version.from_string("v202401.1.2-rc3", allow_candidate=True) > Version.from_string( - "v202401.1.2-rc1", allow_candidate=True - ) - assert Version.from_string("v202501.2.0") > Version.from_string("v202401.10.23") diff --git a/docs/conf.py b/docs/conf.py index 1512f1f41..3a97750c6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -7,6 +7,8 @@ # # All configuration values have a default; values that are commented out # serve to show the default. +import asyncio +import csv import inspect import os import shutil @@ -14,16 +16,31 @@ __location__ = os.path.join(os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe()))) +from itertools import pairwise + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
from pathlib import Path +from typing import Iterable + +from bo4e_cli.diff.diff import diff_schemas +from bo4e_cli.diff.matrix import create_compatibility_matrix, create_graph_from_changes, get_path_through_di_path_graph +from bo4e_cli.edit.update_refs import update_references_all_schemas +from bo4e_cli.io.changes import write_changes +from bo4e_cli.io.git import get_last_n_tags +from bo4e_cli.io.github import download_schemas +from bo4e_cli.io.matrix import write_compatibility_matrix_csv +from bo4e_cli.io.schemas import read_schemas, write_schemas +from bo4e_cli.models.changes import Changes +from bo4e_cli.models.meta import Schemas +from bo4e_cli.models.version import Version +from bo4e_cli.utils.github_cli import get_access_token_from_cli_if_installed sys.path.insert(0, os.path.join(__location__, "../src")) sys.path.insert(0, os.path.join(__location__, "../docs")) sys.path.insert(0, os.path.join(__location__, "../docs/compatibility")) import uml -from compatibility.__main__ import create_tables_for_doc # import package bo4e to clarify namespaces and prevent circular import errors from bo4e import * @@ -162,7 +179,6 @@ def setup(app): # documentation. html_theme_options = { "logo_only": False, - "display_version": True, "prev_next_buttons_location": "bottom", "style_external_links": False, # There is still a bug which will probably get fixed soon @@ -187,11 +203,11 @@ def setup(app): # be set by the action. This is to support things like /latest or /stable. if "release" not in globals(): release = os.getenv("SPHINX_DOCS_RELEASE") - if release is None: + if not release: from bo4e import __gh_version__ as release if "version" not in globals(): version = os.getenv("SPHINX_DOCS_VERSION") - if version is None: + if not version: from bo4e import __version__ as version print(f"Got version = {version} from __version__") @@ -324,7 +340,8 @@ def setup(app): # Create UML diagrams in plantuml format. Compile these into svg files into the _static folder. 
# See docs/uml.py for more details. -if release != "local": +release_version = Version.from_str(release) +if not release_version.is_dirty(): uml.LINK_URI_BASE = f"https://bo4e.github.io/BO4E-python/{release}" _exec_plantuml = Path(__location__) / "plantuml.jar" _network, _namespaces_to_parse = uml.build_network(Path(module_dir), uml.PlantUMLNetwork) @@ -336,7 +353,62 @@ def setup(app): print(f"Compiled uml files into svg using kroki.") # Create compatibility matrix +# CONSOLE.verbose = True compatibility_matrix_output_file = Path(__file__).parent / "_static/tables/compatibility_matrix.csv" -gh_token = os.getenv("GITHUB_ACCESS_TOKEN") or os.getenv("GITHUB_TOKEN") -create_tables_for_doc(compatibility_matrix_output_file, release, last_n_versions=0, gh_token=gh_token) +gh_token = os.getenv("GITHUB_ACCESS_TOKEN") or os.getenv("GITHUB_TOKEN") or get_access_token_from_cli_if_installed() + +compiling_from_release_workflow = not release_version.is_dirty() +last_versions = get_last_n_tags( + n=3, + ref=str(release) if compiling_from_release_workflow else "HEAD", + exclude_candidates=True, + exclude_technical_bumps=True, + token=gh_token, +) +schemas_base_dir = Path(__file__).parents[1] / "tmp/bo4e_json_schemas" +changes_base_dir = Path(__file__).parent / "_static/tables/changes" +current_json_schemas_dir = Path(__file__).parents[1] / "json_schemas" + + +async def download_missing_schemas(versions: Iterable[Version], gh_token: str | None = None) -> list[Schemas]: + schemas_list = [] + for _version in versions: + print(f"Checking for schemas of version {_version}...") + schemas_dir = schemas_base_dir / str(_version) + if not schemas_dir.exists(): + schemas_list.append(await download_schemas(_version, gh_token)) + update_references_all_schemas(schemas_list[-1]) + write_schemas(schemas_list[-1], schemas_dir) + else: + schemas_list.append(read_schemas(schemas_dir)) + return schemas_list + + +current_json_schemas = read_schemas(current_json_schemas_dir) 
+update_references_all_schemas(current_json_schemas) +schemas = [current_json_schemas, *asyncio.run(download_missing_schemas(last_versions, gh_token))] +changes = [diff_schemas(schemas_1, schemas_2) for schemas_1, schemas_2 in pairwise(reversed(schemas))] + + +def write_changes_table_csv(changes_iterable: Iterable[Changes], csv_file: Path) -> None: + with open(csv_file, "w", encoding="utf-8") as file: + csv_writer = csv.writer(file, delimiter=",", lineterminator="\n", escapechar="/") + csv_writer.writerow(("Old Version", "New Version", "Diff-file")) + for changes in changes_iterable: + changes_file = changes_base_dir / f"{changes.old_version}_to_{changes.new_version}.json" + write_changes(changes, changes_file) + changes_link_path = str(changes_file.relative_to(Path(__file__).parent).as_posix()) + print(f"Created changes file: {changes_link_path}") + csv_writer.writerow( + (changes.old_version, changes.new_version, f"`{changes_file.name} <{changes_link_path}>`__") + ) + print(f"Created changes table at {csv_file}") + + +graph = create_graph_from_changes(iter(changes)) +graph_path = get_path_through_di_path_graph(graph) +compatibility_matrix = create_compatibility_matrix(graph, graph_path, use_emotes=True) +write_compatibility_matrix_csv(compatibility_matrix_output_file, compatibility_matrix, graph_path) +write_changes_table_csv(changes, compatibility_matrix_output_file.parent / "changes_table.csv") + print(f"Created compatibility matrix at static folder {compatibility_matrix_output_file}") diff --git a/docs/requirements.in b/docs/requirements.in index f30e5b372..dca637f3c 100644 --- a/docs/requirements.in +++ b/docs/requirements.in @@ -6,5 +6,4 @@ requests Sphinx sphinx_rtd_theme typeguard -BO4E-Schema-Tool -click +BO4E-CLI diff --git a/docs/requirements.txt b/docs/requirements.txt index 0a0898067..0134bfd91 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -4,113 +4,190 @@ # # pip-compile '.\docs\requirements.in' # -alabaster==0.7.16 
+alabaster==1.0.0 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic -babel==2.15.0 +anyio==4.9.0 + # via httpx +argcomplete==3.6.2 + # via datamodel-code-generator +autoflake==2.3.1 + # via bo4e-cli +babel==2.17.0 # via sphinx -bo4e-schema-tool==0.0.9 - # via -r requirements.in -certifi==2024.7.4 - # via requests -cffi==1.16.0 +black==25.1.0 + # via datamodel-code-generator +bo4e-cli==0.0.3 + # via -r .\docs\requirements.in +certifi==2025.7.9 + # via + # httpcore + # httpx + # requests +cffi==1.17.1 # via # cryptography # pynacl -charset-normalizer==2.1.0 +charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via - # -r requirements.in - # bo4e-schema-tool -cryptography==44.0.1 + # black + # typer +colorama==0.4.6 + # via + # click + # sphinx +cryptography==45.0.5 # via pyjwt -deprecated==1.2.14 +datamodel-code-generator==0.31.2 + # via bo4e-cli +deprecated==1.2.18 # via pygithub -docutils==0.20.1 +docutils==0.21.2 # via # sphinx # sphinx-rtd-theme -idna==3.7 - # via requests +genson==1.3.0 + # via datamodel-code-generator +greenlet==3.2.3 + # via sqlalchemy +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via bo4e-cli +idna==3.10 + # via + # anyio + # httpx + # requests imagesize==1.4.1 # via sphinx +inflect==7.5.0 + # via datamodel-code-generator iso3166==2.1.1 - # via -r requirements.in + # via -r .\docs\requirements.in +isort==6.0.1 + # via datamodel-code-generator jinja2==3.1.6 - # via sphinx -markupsafe==2.1.3 + # via + # datamodel-code-generator + # sphinx +markdown-it-py==3.0.0 + # via rich +markupsafe==3.0.2 # via jinja2 -more-itertools==10.2.0 - # via bo4e-schema-tool -networkx==3.4.2 - # via -r requirements.in -packaging==24.0 - # via sphinx -pycparser==2.21 +mdurl==0.1.2 + # via markdown-it-py +more-itertools==10.7.0 + # via + # bo4e-cli + # inflect +mypy-extensions==1.1.0 + # via black +networkx==3.5 + # via + # -r .\docs\requirements.in + # bo4e-cli +packaging==25.0 + # via + # 
black + # datamodel-code-generator + # sphinx +pathspec==0.12.1 + # via black +platformdirs==4.3.8 + # via black +pycparser==2.22 # via cffi -pydantic==2.8.2 +pydantic==2.11.7 # via - # -r requirements.in - # bo4e-schema-tool -pydantic-core==2.20.1 + # -r .\docs\requirements.in + # bo4e-cli + # datamodel-code-generator + # sqlmodel +pydantic-core==2.33.2 # via pydantic -pygithub==2.2.0 - # via bo4e-schema-tool -pygments==2.18.0 - # via sphinx +pyflakes==3.4.0 + # via autoflake +pygithub==2.6.1 + # via bo4e-cli +pygments==2.19.2 + # via + # rich + # sphinx pyhumps==3.8.0 - # via -r requirements.in -pyjwt[crypto]==2.8.0 + # via -r .\docs\requirements.in +pyjwt[crypto]==2.10.1 # via pygithub pynacl==1.5.0 # via pygithub +pyyaml==6.0.2 + # via datamodel-code-generator requests==2.32.4 # via - # -r requirements.in - # bo4e-schema-tool + # -r .\docs\requirements.in # pygithub # sphinx -snowballstemmer==2.2.0 +rich==14.0.0 + # via typer +roman-numerals-py==3.1.0 + # via sphinx +shellingham==1.5.4 + # via typer +sniffio==1.3.1 + # via anyio +snowballstemmer==3.0.1 # via sphinx -sphinx==7.4.7 +sphinx==8.2.3 # via - # -r requirements.in + # -r .\docs\requirements.in # sphinx-rtd-theme - # sphinxcontrib-applehelp - # sphinxcontrib-devhelp - # sphinxcontrib-htmlhelp # sphinxcontrib-jquery - # sphinxcontrib-qthelp - # sphinxcontrib-serializinghtml -sphinx-rtd-theme==2.0.0 - # via -r requirements.in -sphinxcontrib-applehelp==1.0.7 +sphinx-rtd-theme==3.0.2 + # via -r .\docs\requirements.in +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.5 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.4 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jquery==4.1 # via sphinx-rtd-theme sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.6 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx -typeguard==4.4.1 - # via -r requirements.in 
-typing-extensions==4.11.0 +sqlalchemy==2.0.41 + # via sqlmodel +sqlmodel==0.0.24 + # via bo4e-cli +typeguard==4.4.4 # via + # -r .\docs\requirements.in + # inflect +typer==0.16.0 + # via bo4e-cli +typing-extensions==4.14.1 + # via + # anyio # pydantic # pydantic-core # pygithub + # sqlalchemy # typeguard -urllib3==2.2.2 + # typer + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # pygithub # requests -wrapt==1.16.0 +wrapt==1.17.2 # via deprecated diff --git a/generate_or_validate_json_schemas.py b/generate_or_validate_json_schemas.py index 42d7bd0b3..6441c0e74 100644 --- a/generate_or_validate_json_schemas.py +++ b/generate_or_validate_json_schemas.py @@ -14,12 +14,14 @@ from typing import Any, Iterator, Literal, cast import click +from bo4e_cli.io.version_file import create_version_file +from bo4e_cli.models.version import Version from pydantic import BaseModel, TypeAdapter from pydantic.json_schema import GenerateJsonSchema as _GenerateJsonSchema from pydantic.json_schema import JsonSchemaValue from pydantic_core import core_schema -from bo4e import ZusatzAttribut +from bo4e import ZusatzAttribut, __gh_version__ logging.basicConfig(level=logging.INFO, stream=sys.stdout) _logger = logging.getLogger(__name__) @@ -115,10 +117,21 @@ def get_schema_json_dict(cls: Any) -> dict[str, Any]: reference_match = reference_pattern.fullmatch(schema_json_dict["allOf"][0]["$ref"]) assert ( reference_match is not None - ), "Internal Error: Reference string has unexpected format: {schema_json_dict['allOf'][0]['$ref']}" + ), f"Internal Error: Reference string has unexpected format: {schema_json_dict['allOf'][0]['$ref']}" schema_json_dict_to_merge = schema_json_dict["$defs"][reference_match.group("cls_name")] del schema_json_dict["allOf"] schema_json_dict.update(schema_json_dict_to_merge) + if {"$ref", "$defs"} == set(schema_json_dict.keys()): + # The newer version of pydantic sometimes generates a schema with only a $ref and a $defs key where 
the $ref
+        # field points to the actual schema definition in the $defs field.
+        reference_pattern = re.compile(r"^#/\$defs/(?P<cls_name>\w+)$")
+        reference_match = reference_pattern.fullmatch(schema_json_dict["$ref"])
+        assert (
+            reference_match is not None
+        ), f"Internal Error: Reference string has unexpected format: {schema_json_dict['$ref']}"
+        schema_json_dict_to_merge = schema_json_dict["$defs"][reference_match.group("cls_name")]
+        del schema_json_dict["$ref"]
+        schema_json_dict.update(schema_json_dict_to_merge)
     if "$defs" in schema_json_dict:
         del schema_json_dict["$defs"]
     return schema_json_dict
@@ -148,6 +161,15 @@ def generate_schema(file_path: Path, schema_json_dict: dict[str, Any]) -> None:
         json_schema_file.write("\n")
 
 
+def generate_version_file(file_path: Path, version: str) -> None:
+    """
+    Generate a version file with the given version
+    """
+    with open(file_path, "w+", encoding="utf-8") as version_file:
+        version_file.write(version)
+    _logger.info("Generated version file at %s with content %s", file_path, version)
+
+
 def replace_refs(
     schema_json_dict: dict[str, Any], namespace: dict[str, tuple[str, str, PARSABLE_CLASS_TYPE]], target_version: str
 ) -> None:
@@ -200,11 +222,12 @@ def traverse_dict(obj: dict[str, Any]) -> None:
     required=False,
     type=click.STRING,
     envvar="TARGET_VERSION",
-    default="v0.0.0",
+    default=None,
 )
-def generate_or_validate_json_schemas(mode: Literal["validate", "generate"], target_version: str) -> None:
+def generate_or_validate_json_schemas(mode: Literal["validate", "generate"], target_version: str | None) -> None:
     """generate json schemas for all BOs and COMs"""
-    _logger.info("Mode: %s, target version: %s", mode, target_version)
+    version = Version.from_str(target_version or __gh_version__)
+    _logger.info("Mode: %s, target version: %s", mode, version)
 
     packages = ["bo", "com", "enum"]
     if mode == "generate":
@@ -224,7 +247,7 @@
         file_path = 
output_directory / pkg / (name + ".json")
         schema_json_dict = get_schema_json_dict(cls)
 
-        replace_refs(schema_json_dict, namespace, target_version)
+        replace_refs(schema_json_dict, namespace, str(version))
 
         if mode == "validate":
             validate_schema(file_path, schema_json_dict, name)
@@ -233,6 +256,9 @@
             _logger.info("Generated schema for %s", name)
         else:
             raise ValueError(f"Unknown mode '{mode}'")
+    if mode == "generate":
+        create_version_file(output_directory, version)
+        _logger.info("Generated version file at %s with version %s", output_directory / ".version", version)
 
 
 if __name__ == "__main__":
diff --git a/pyproject.toml b/pyproject.toml
index 2a0333f7b..1cf465500 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -43,6 +43,15 @@ fragments = [{ path = "README.rst" }]
 [tool.hatch.version]
 source = "vcs"
 
+[tool.hatch.version.raw-options]
+# version_scheme = "no-guess-dev"
+version_scheme = "only-version"
+local_scheme = "node-and-date"
+# See https://setuptools-scm.readthedocs.io/en/latest/extending/#version-number-construction
+# for details of how the version number is constructed.
+# Note that the BO4E CLI which is used in the CI expects the version to be in the format how it currently is.
+# If you wish to change the version format, you will also have to change the CLI code.
+
 [tool.hatch.build.hooks.vcs]
 version-file = "src/_bo4e_python_version.py"
 template = '''
diff --git a/requirements.txt b/requirements.txt
index cc5fbe406..10d5dbd41 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,17 +4,20 @@
 #
 # pip-compile requirements.in
 #
-annotated-types==0.5.0
+annotated-types==0.7.0
     # via pydantic
 iso3166==2.1.1
     # via -r requirements.in
-pydantic==2.8.2
+pydantic==2.11.7
     # via -r requirements.in
-pydantic-core==2.20.1
+pydantic-core==2.33.2
     # via pydantic
 pyhumps==3.8.0
     # via -r requirements.in
-typing-extensions==4.11.0
+typing-extensions==4.14.1
     # via
     #   pydantic
     #   pydantic-core
+    #   typing-inspection
+typing-inspection==0.4.1
+    # via pydantic
diff --git a/src/bo4e/version.py b/src/bo4e/version.py
index ba5d42269..62fd461c8 100644
--- a/src/bo4e/version.py
+++ b/src/bo4e/version.py
@@ -7,17 +7,43 @@
 
 try:
     __version__ = version("bo4e")
+    # Please keep this name in sync with the name of the project in pyproject.toml
+    # This name is the name of the package on pypi.org
 except PackageNotFoundError:
     __version__ = "0.0.0"
 
-# Please keep this name in sync with the name of the project in pyproject.toml
-# This name is the name of the package on pypi.org
-if re.match(r"^(\d+\.\d+\.\d+)(rc\d+)?$", __version__):
 
-    def _repl(match: re.Match[str]) -> str:
-        if match.group(2) is not None:
-            return f"v{match.group(1)}-{match.group(2)}"
-        return f"v{match.group(1)}"
 
-    __gh_version__ = re.sub(r"^(\d+\.\d+\.\d+)(rc\d+)?$", _repl, __version__)
-else:
-    __gh_version__ = f"v{__version__}"
+
+def _parse_version_to_gh_version(version_str: str) -> str:
+    """
+    Parse a version string into a GitHub version string.
+    E.g. '202401.0.1-rc8+dev12asdf34' becomes 'v202401.0.1-rc8'.
+    """
+    _regex_version = re.compile(
+        r"^(?P<major>\d{6})\."
+        r"(?P<functional>\d+)\."
+        r"(?P<technical>\d+)"
+        r"(?:rc(?P<candidate>\d*))?"
+ r"(?:\+g(?P\w+)" + r"(?:\.d(?P\d{4})" + r"(?P\d{2})" + r"(?P\d{2}))?)?$" + ) + match = _regex_version.match(version_str) + if match is None: + raise ValueError(f"Invalid version string: {version_str}") + + return ( + f"v{match.group('major')}.{match.group('functional')}.{match.group('technical')}" + + (f"-rc{match.group('candidate')}" if match.group("candidate") else "") + + (f"+g{match.group('commit_part')}" if match.group("commit_part") else "") + + ( + f".d{match.group('dirty_workdir_date_year')}" + f"{match.group('dirty_workdir_date_month')}" + f"{match.group('dirty_workdir_date_day')}" + if match.group("dirty_workdir_date_year") + else "" + ) + ) + + +__gh_version__ = _parse_version_to_gh_version(__version__) diff --git a/tox.ini b/tox.ini index bfe8aaccb..75fa4bbfd 100644 --- a/tox.ini +++ b/tox.ini @@ -28,7 +28,6 @@ deps = commands = pylint src/bo4e pylint docs/uml.py - pylint docs/compatibility pylint generate_or_validate_json_schemas.py [testenv:type_check] @@ -43,7 +42,6 @@ commands = mypy --show-error-codes src/bo4e mypy --show-error-codes tests mypy --show-error-codes docs/uml.py - mypy --show-error-codes docs/compatibility mypy --show-error-codes generate_or_validate_json_schemas.py # add single files (ending with .py) or packages here @@ -85,9 +83,9 @@ deps = {[testenv:generate_json_schemas]deps} # any dependency added here should also be added in docs/requirements.in and docs/requirements.txt respectively setenv = - TARGET_VERSION = {env:TARGET_VERSION:local} - SPHINX_DOCS_RELEASE = {env:TARGET_VERSION:local} - SPHINX_DOCS_VERSION = {env:TARGET_VERSION:local} + TARGET_VERSION = {env:TARGET_VERSION:} + SPHINX_DOCS_RELEASE = {env:TARGET_VERSION:} + SPHINX_DOCS_VERSION = {env:TARGET_VERSION:} commands = {[testenv:generate_json_schemas]commands} sphinx-build -T -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html