diff --git a/.gitignore b/.gitignore index 038b42b7..ffa94acc 100644 --- a/.gitignore +++ b/.gitignore @@ -147,3 +147,5 @@ offline/ # Vagrant Vagrantfile + +.sourcery* diff --git a/CHANGELOG.md b/CHANGELOG.md index cea2e9f5..8565f898 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,10 @@ * Add support for defining Simvue run defaults using `tool.simvue` in a project `pyproject.toml` file. * Drop support for INI based configuration files. * Retrieve all metric values if `max_points` is unspecified or set to `None`. +* Add support for PyTorch in Python 3.13. +* Create lower level API for directly interacting with the Simvue RestAPI endpoints. +* **Remove support for Python <3.10 due to dependency constraints.** +* Separate `create_alert` into specific methods `create_event_alert` etc. ## [v1.1.4](https://github.com/simvue-io/client/releases/tag/v1.1.4) - 2024-12-11 * Remove incorrect identifier reference for latest Simvue servers during reconnection. diff --git a/README.md b/README.md index 98a6bbbd..974a361e 100644 --- a/README.md +++ b/README.md @@ -76,14 +76,15 @@ if __name__ == "__main__": run.save_file('params.in', 'input') # Add an alert (the alert definition will be created if necessary) - run.create_alert(name='loss-too-high', # Name - source='metrics', # Source - rule='is above', # Rule - metric='loss', # Metric - frequency=1, # Frequency - window=1, # Window - threshold=10, # Threshold - notification='email') # Notification type + run.create_metric_threshold_alert( + name='loss-too-high', # Name + rule='is above', # Rule + metric='loss', # Metric + frequency=1, # Frequency + window=1, # Window + threshold=10, # Threshold + notification='email' # Notification type + ) ... 
diff --git a/examples/Geant4/geant4_simvue.py b/examples/Geant4/geant4_simvue.py index 3d3f9dfb..2a864e0f 100644 --- a/examples/Geant4/geant4_simvue.py +++ b/examples/Geant4/geant4_simvue.py @@ -27,7 +27,7 @@ @click.option("--momentum", type=float, default=10) @click.option("--events", type=int, default=100) def geant4_simvue_example( - g4_binary: str, config: typing.Optional[str], ci: bool, momentum: float, events: int + g4_binary: str, config: str | None, ci: bool, momentum: float, events: int ) -> None: @mp_file_parse.file_parser def root_file_parser( diff --git a/poetry.lock b/poetry.lock index ff72a157..504488ee 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,6 +6,8 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -17,6 +19,8 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -39,6 +43,8 @@ version = "1.3.0" description = "Better dates & times for 
Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, @@ -52,15 +58,38 @@ types-python-dateutil = ">=2.8.10" doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] +[[package]] +name = "attrs" +version = "25.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + [[package]] name = "certifi" 
-version = "2024.12.14" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, - {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -69,6 +98,8 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and platform_python_implementation != \"PyPy\" or python_version >= \"3.12\" and platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -148,6 +179,8 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -249,6 +282,8 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -263,6 +298,8 @@ version = "2.8.3" description = "" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "codecarbon-2.8.3-py3-none-any.whl", hash = "sha256:d3204852ad0c83d94d0f16b7d922e7f540c1e5f488d911f3e75408fe29f4ef4c"}, {file = "codecarbon-2.8.3.tar.gz", hash = "sha256:037dd5afa1c5f60154f893ecd1631e0c849786edcfc9ff34a7ef467707891269"}, @@ -292,10 +329,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "python_version <= \"3.11\" and platform_system == \"Windows\" or python_version >= \"3.12\" and platform_system == \"Windows\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "contourpy" @@ -303,6 +342,8 @@ version = "1.3.1" description = "Python library for calculating contours of 2D quadrilateral grids" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -376,6 +417,8 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -453,6 +496,8 @@ version = "44.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"}, @@ -460,6 +505,7 @@ files = [ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, + {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, @@ -470,6 +516,7 @@ files = [ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, + {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, @@ -502,6 +549,8 @@ version = "0.12.1" description = "Composable style cycles" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -512,19 +561,43 @@ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] [[package]] -name = "dill" -version = "0.3.9" -description = "serialize all of Python" +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, - {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = 
"dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, ] [package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" [[package]] name = "exceptiongroup" @@ -532,6 +605,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -546,6 +621,8 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or 
python_version >= \"3.12\"" files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -560,6 +637,8 @@ version = "0.20.0" description = "Fief Client for Python" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fief_client-0.20.0-py3-none-any.whl", hash = "sha256:425f40cc7c45c651daec63da402e033c53d91dcaa3f9bf208873fd8692fc16dc"}, {file = "fief_client-0.20.0.tar.gz", hash = "sha256:dbfb906d03c4a5402ceac5c843aa4708535fb6f5d5c1c4e263ec06fbbbc434d7"}, @@ -581,6 +660,8 @@ version = "0.7.0" description = "A library for automatically generating command line interfaces." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fire-0.7.0.tar.gz", hash = "sha256:961550f07936eaf65ad1dc8360f2b2bf8408fad46abbfa4d2a3794f8d2a95cdf"}, ] @@ -588,63 +669,77 @@ files = [ [package.dependencies] termcolor = "*" +[[package]] +name = "flatdict" +version = "4.0.1" +description = "Python module for interacting with nested dicts as a single level dict with delimited keys." 
+optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "flatdict-4.0.1.tar.gz", hash = "sha256:cd32f08fd31ed21eb09ebc76f06b6bd12046a24f77beb1fd0281917e47f26742"}, +] + [[package]] name = "fonttools" -version = "4.55.3" +version = "4.55.8" description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" -files = [ - {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1dcc07934a2165ccdc3a5a608db56fb3c24b609658a5b340aee4ecf3ba679dc0"}, - {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f7d66c15ba875432a2d2fb419523f5d3d347f91f48f57b8b08a2dfc3c39b8a3f"}, - {file = "fonttools-4.55.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e4ae3592e62eba83cd2c4ccd9462dcfa603ff78e09110680a5444c6925d841"}, - {file = "fonttools-4.55.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62d65a3022c35e404d19ca14f291c89cc5890032ff04f6c17af0bd1927299674"}, - {file = "fonttools-4.55.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d342e88764fb201286d185093781bf6628bbe380a913c24adf772d901baa8276"}, - {file = "fonttools-4.55.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dd68c87a2bfe37c5b33bcda0fba39b65a353876d3b9006fde3adae31f97b3ef5"}, - {file = "fonttools-4.55.3-cp310-cp310-win32.whl", hash = "sha256:1bc7ad24ff98846282eef1cbeac05d013c2154f977a79886bb943015d2b1b261"}, - {file = "fonttools-4.55.3-cp310-cp310-win_amd64.whl", hash = "sha256:b54baf65c52952db65df39fcd4820668d0ef4766c0ccdf32879b77f7c804d5c5"}, - {file = "fonttools-4.55.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c4491699bad88efe95772543cd49870cf756b019ad56294f6498982408ab03e"}, - {file = "fonttools-4.55.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5323a22eabddf4b24f66d26894f1229261021dacd9d29e89f7872dd8c63f0b8b"}, - {file = 
"fonttools-4.55.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5480673f599ad410695ca2ddef2dfefe9df779a9a5cda89503881e503c9c7d90"}, - {file = "fonttools-4.55.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da9da6d65cd7aa6b0f806556f4985bcbf603bf0c5c590e61b43aa3e5a0f822d0"}, - {file = "fonttools-4.55.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e894b5bd60d9f473bed7a8f506515549cc194de08064d829464088d23097331b"}, - {file = "fonttools-4.55.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aee3b57643827e237ff6ec6d28d9ff9766bd8b21e08cd13bff479e13d4b14765"}, - {file = "fonttools-4.55.3-cp311-cp311-win32.whl", hash = "sha256:eb6ca911c4c17eb51853143624d8dc87cdcdf12a711fc38bf5bd21521e79715f"}, - {file = "fonttools-4.55.3-cp311-cp311-win_amd64.whl", hash = "sha256:6314bf82c54c53c71805318fcf6786d986461622dd926d92a465199ff54b1b72"}, - {file = "fonttools-4.55.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f9e736f60f4911061235603a6119e72053073a12c6d7904011df2d8fad2c0e35"}, - {file = "fonttools-4.55.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a8aa2c5e5b8b3bcb2e4538d929f6589a5c6bdb84fd16e2ed92649fb5454f11c"}, - {file = "fonttools-4.55.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07f8288aacf0a38d174445fc78377a97fb0b83cfe352a90c9d9c1400571963c7"}, - {file = "fonttools-4.55.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8d5e8916c0970fbc0f6f1bece0063363bb5857a7f170121a4493e31c3db3314"}, - {file = "fonttools-4.55.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ae3b6600565b2d80b7c05acb8e24d2b26ac407b27a3f2e078229721ba5698427"}, - {file = "fonttools-4.55.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:54153c49913f45065c8d9e6d0c101396725c5621c8aee744719300f79771d75a"}, - {file = "fonttools-4.55.3-cp312-cp312-win32.whl", hash = 
"sha256:827e95fdbbd3e51f8b459af5ea10ecb4e30af50221ca103bea68218e9615de07"}, - {file = "fonttools-4.55.3-cp312-cp312-win_amd64.whl", hash = "sha256:e6e8766eeeb2de759e862004aa11a9ea3d6f6d5ec710551a88b476192b64fd54"}, - {file = "fonttools-4.55.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a430178ad3e650e695167cb53242dae3477b35c95bef6525b074d87493c4bf29"}, - {file = "fonttools-4.55.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:529cef2ce91dc44f8e407cc567fae6e49a1786f2fefefa73a294704c415322a4"}, - {file = "fonttools-4.55.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e75f12c82127486fac2d8bfbf5bf058202f54bf4f158d367e41647b972342ca"}, - {file = "fonttools-4.55.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:859c358ebf41db18fb72342d3080bce67c02b39e86b9fbcf1610cca14984841b"}, - {file = "fonttools-4.55.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:546565028e244a701f73df6d8dd6be489d01617863ec0c6a42fa25bf45d43048"}, - {file = "fonttools-4.55.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:aca318b77f23523309eec4475d1fbbb00a6b133eb766a8bdc401faba91261abe"}, - {file = "fonttools-4.55.3-cp313-cp313-win32.whl", hash = "sha256:8c5ec45428edaa7022f1c949a632a6f298edc7b481312fc7dc258921e9399628"}, - {file = "fonttools-4.55.3-cp313-cp313-win_amd64.whl", hash = "sha256:11e5de1ee0d95af4ae23c1a138b184b7f06e0b6abacabf1d0db41c90b03d834b"}, - {file = "fonttools-4.55.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:caf8230f3e10f8f5d7593eb6d252a37caf58c480b19a17e250a63dad63834cf3"}, - {file = "fonttools-4.55.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b586ab5b15b6097f2fb71cafa3c98edfd0dba1ad8027229e7b1e204a58b0e09d"}, - {file = "fonttools-4.55.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8c2794ded89399cc2169c4d0bf7941247b8d5932b2659e09834adfbb01589aa"}, - {file = 
"fonttools-4.55.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf4fe7c124aa3f4e4c1940880156e13f2f4d98170d35c749e6b4f119a872551e"}, - {file = "fonttools-4.55.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:86721fbc389ef5cc1e2f477019e5069e8e4421e8d9576e9c26f840dbb04678de"}, - {file = "fonttools-4.55.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:89bdc5d88bdeec1b15af790810e267e8332d92561dce4f0748c2b95c9bdf3926"}, - {file = "fonttools-4.55.3-cp38-cp38-win32.whl", hash = "sha256:bc5dbb4685e51235ef487e4bd501ddfc49be5aede5e40f4cefcccabc6e60fb4b"}, - {file = "fonttools-4.55.3-cp38-cp38-win_amd64.whl", hash = "sha256:cd70de1a52a8ee2d1877b6293af8a2484ac82514f10b1c67c1c5762d38073e56"}, - {file = "fonttools-4.55.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bdcc9f04b36c6c20978d3f060e5323a43f6222accc4e7fcbef3f428e216d96af"}, - {file = "fonttools-4.55.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c3ca99e0d460eff46e033cd3992a969658c3169ffcd533e0a39c63a38beb6831"}, - {file = "fonttools-4.55.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22f38464daa6cdb7b6aebd14ab06609328fe1e9705bb0fcc7d1e69de7109ee02"}, - {file = "fonttools-4.55.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed63959d00b61959b035c7d47f9313c2c1ece090ff63afea702fe86de00dbed4"}, - {file = "fonttools-4.55.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5e8d657cd7326eeaba27de2740e847c6b39dde2f8d7cd7cc56f6aad404ddf0bd"}, - {file = "fonttools-4.55.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:fb594b5a99943042c702c550d5494bdd7577f6ef19b0bc73877c948a63184a32"}, - {file = "fonttools-4.55.3-cp39-cp39-win32.whl", hash = "sha256:dc5294a3d5c84226e3dbba1b6f61d7ad813a8c0238fceea4e09aa04848c3d851"}, - {file = "fonttools-4.55.3-cp39-cp39-win_amd64.whl", hash = "sha256:aedbeb1db64496d098e6be92b2e63b5fac4e53b1b92032dfc6988e1ea9134a4d"}, - {file = "fonttools-4.55.3-py3-none-any.whl", hash = 
"sha256:f412604ccbeee81b091b420272841e5ec5ef68967a9790e80bffd0e30b8e2977"}, - {file = "fonttools-4.55.3.tar.gz", hash = "sha256:3983313c2a04d6cc1fe9251f8fc647754cf49a61dac6cb1e7249ae67afaafc45"}, +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" +files = [ + {file = "fonttools-4.55.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d11600f5343092697d7434f3bf77a393c7ae74be206fe30e577b9a195fd53165"}, + {file = "fonttools-4.55.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c96f2506ce1a0beeaa9595f9a8b7446477eb133f40c0e41fc078744c28149f80"}, + {file = "fonttools-4.55.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b5f05ef72e846e9f49ccdd74b9da4309901a4248434c63c1ee9321adcb51d65"}, + {file = "fonttools-4.55.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba45b637da80a262b55b7657aec68da2ac54b8ae7891cd977a5dbe5fd26db429"}, + {file = "fonttools-4.55.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:edcffaeadba9a334c1c3866e275d7dd495465e7dbd296f688901bdbd71758113"}, + {file = "fonttools-4.55.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b9f9fce3c9b2196e162182ec5db8af8eb3acd0d76c2eafe9fdba5f370044e556"}, + {file = "fonttools-4.55.8-cp310-cp310-win32.whl", hash = "sha256:f089e8da0990cfe2d67e81d9cf581ff372b48dc5acf2782701844211cd1f0eb3"}, + {file = "fonttools-4.55.8-cp310-cp310-win_amd64.whl", hash = "sha256:01ea3901b0802fc5f9e854f5aeb5bc27770dd9dd24c28df8f74ba90f8b3f5915"}, + {file = "fonttools-4.55.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:95f5a1d4432b3cea6571f5ce4f4e9b25bf36efbd61c32f4f90130a690925d6ee"}, + {file = "fonttools-4.55.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d20f152de7625a0008ba1513f126daaaa0de3b4b9030aa72dd5c27294992260"}, + {file = "fonttools-4.55.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d5a3ff5bb95fd5a3962b2754f8435e6d930c84fc9e9921c51e802dddf40acd56"}, + {file = "fonttools-4.55.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b99d4fd2b6d0a00c7336c8363fccc7a11eccef4b17393af75ca6e77cf93ff413"}, + {file = "fonttools-4.55.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d637e4d33e46619c79d1a6c725f74d71b574cd15fb5bbb9b6f3eba8f28363573"}, + {file = "fonttools-4.55.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0f38bfb6b7a39c4162c3eb0820a0bdf8e3bdd125cd54e10ba242397d15e32439"}, + {file = "fonttools-4.55.8-cp311-cp311-win32.whl", hash = "sha256:acfec948de41cd5e640d5c15d0200e8b8e7c5c6bb82afe1ca095cbc4af1188ee"}, + {file = "fonttools-4.55.8-cp311-cp311-win_amd64.whl", hash = "sha256:604c805b41241b4880e2dc86cf2d4754c06777371c8299799ac88d836cb18c3b"}, + {file = "fonttools-4.55.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63403ee0f2fa4e1de28e539f8c24f2bdca1d8ecb503fa9ea2d231d9f1e729809"}, + {file = "fonttools-4.55.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:302e1003a760b222f711d5ba6d1ad7fd5f7f713eb872cd6a3eb44390bc9770af"}, + {file = "fonttools-4.55.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e72a7816ff8a759be9ca36ca46934f8ccf4383711ef597d9240306fe1878cb8d"}, + {file = "fonttools-4.55.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03c2b50b54e6e8b3564b232e57e8f58be217cf441cf0155745d9e44a76f9c30f"}, + {file = "fonttools-4.55.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7230f7590f9570d26ee903b6a4540274494e200fae978df0d9325b7b9144529"}, + {file = "fonttools-4.55.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:466a78984f0572305c3c48377f4e3f7f4e909f1209f45ef8e7041d5c8a744a56"}, + {file = "fonttools-4.55.8-cp312-cp312-win32.whl", hash = "sha256:243cbfc0b7cb1c307af40e321f8343a48d0a080bc1f9466cf2b5468f776ef108"}, + {file = 
"fonttools-4.55.8-cp312-cp312-win_amd64.whl", hash = "sha256:a19059aa892676822c1f05cb5a67296ecdfeb267fe7c47d4758f3e8e942c2b2a"}, + {file = "fonttools-4.55.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:332883b6280b9d90d2ba7e9e81be77cf2ace696161e60cdcf40cfcd2b3ed06fa"}, + {file = "fonttools-4.55.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6b8d7c149d47b47de7ec81763396c8266e5ebe2e0b14aa9c3ccf29e52260ab2f"}, + {file = "fonttools-4.55.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dfae7c94987149bdaa0388e6c937566aa398fa0eec973b17952350a069cff4e"}, + {file = "fonttools-4.55.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0fe12f06169af2fdc642d26a8df53e40adc3beedbd6ffedb19f1c5397b63afd"}, + {file = "fonttools-4.55.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f971aa5f50c22dc4b63a891503624ae2c77330429b34ead32f23c2260c5618cd"}, + {file = "fonttools-4.55.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:708cb17b2590b7f6c6854999df0039ff1140dda9e6f56d67c3599ba6f968fab5"}, + {file = "fonttools-4.55.8-cp313-cp313-win32.whl", hash = "sha256:cfe9cf30f391a0f2875247a3e5e44d8dcb61596e5cf89b360cdffec8a80e9961"}, + {file = "fonttools-4.55.8-cp313-cp313-win_amd64.whl", hash = "sha256:1e10efc8ee10d6f1fe2931d41bccc90cd4b872f2ee4ff21f2231a2c293b2dbf8"}, + {file = "fonttools-4.55.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9b6fcff4dc755b32faff955d989ee26394ddad3a90ea7d558db17a4633c8390c"}, + {file = "fonttools-4.55.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:02c41322e5bdcb484b61b776fcea150215c83619b39c96aa0b44d4fd87bb5574"}, + {file = "fonttools-4.55.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9164f44add0acec0f12fce682824c040dc52e483bfe3838c37142897150c8364"}, + {file = "fonttools-4.55.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2248ebfbcea0d0b3cb459d76a9f67f2eadc10ec0d07e9cadab8777d3f016bf2"}, + {file = "fonttools-4.55.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3461347016c94cb42b36caa907e11565878c4c2c375604f3651d11dc06d1ab3e"}, + {file = "fonttools-4.55.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:67df1c3935838fb9e56f227d7f506c9043b149a4a3b667bef17929c7a1114d19"}, + {file = "fonttools-4.55.8-cp38-cp38-win32.whl", hash = "sha256:cb121d6dd34625cece32234a5fa0359475bb118838b6b4295ffdb13b935edb04"}, + {file = "fonttools-4.55.8-cp38-cp38-win_amd64.whl", hash = "sha256:285c1ac10c160fbdff6d05358230e66c4f98cbbf271f3ec7eb34e967771543e8"}, + {file = "fonttools-4.55.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8abd135e427d88e461a4833c03cf96cfb9028c78c15d58123291f22398e25492"}, + {file = "fonttools-4.55.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:65cb8f97eed7906dcf19bc2736b70c6239e9d7e77aad7c6110ba7239ae082e81"}, + {file = "fonttools-4.55.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450c354c04a6e12a3db968e915fe05730f79ff3d39560947ef8ee6eaa2ab2212"}, + {file = "fonttools-4.55.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2232012a1502b2b8ab4c6bc1d3524bfe90238c0c1a50ac94a0a2085aa87a58a5"}, + {file = "fonttools-4.55.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d39f0c977639be0f9f5505d4c7c478236737f960c567a35f058649c056e41434"}, + {file = "fonttools-4.55.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:de78d6d0dbe32561ce059265437021f4746e56073c4799f0f1095828ae7232bd"}, + {file = "fonttools-4.55.8-cp39-cp39-win32.whl", hash = "sha256:bf4b5b3496ddfdd4e57112e77ec51f1ab388d35ac17322c1248addb2eb0d429a"}, + {file = "fonttools-4.55.8-cp39-cp39-win_amd64.whl", hash = "sha256:ccf8ae02918f431953d338db4d0a675a395faf82bab3a76025582cf32a2f3b7b"}, + {file = "fonttools-4.55.8-py3-none-any.whl", hash = "sha256:07636dae94f7fe88561f9da7a46b13d8e3f529f87fdb221b11d85f91eabceeb7"}, + {file = 
"fonttools-4.55.8.tar.gz", hash = "sha256:54d481d456dcd59af25d4a9c56b2c4c3f20e9620b261b84144e5950f33e8df17"}, ] [package.extras] @@ -667,6 +762,8 @@ version = "4.0.12" description = "Git Object Database" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"}, {file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"}, @@ -681,6 +778,8 @@ version = "3.1.44" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, @@ -699,6 +798,8 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -710,6 +811,8 @@ version = "1.0.7" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -731,6 +834,8 @@ version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, @@ -756,6 +861,8 @@ version = "10.0" description = "Human friendly output for text interfaces using Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, @@ -770,6 +877,8 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -784,17 +893,48 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false 
python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "interrogate" +version = "1.7.0" +description = "Interrogate a codebase for docstring coverage." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "interrogate-1.7.0-py3-none-any.whl", hash = "sha256:b13ff4dd8403369670e2efe684066de9fcb868ad9d7f2b4095d8112142dc9d12"}, + {file = "interrogate-1.7.0.tar.gz", hash = "sha256:a320d6ec644dfd887cc58247a345054fc4d9f981100c45184470068f4b3719b0"}, +] + +[package.dependencies] +attrs = "*" +click = ">=7.1" +colorama = "*" +py = "*" +tabulate = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["cairosvg", "coverage[toml]", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "sphinx", "sphinx-autobuild", "wheel"] +docs = ["sphinx", "sphinx-autobuild"] +png = ["cairosvg"] +tests = ["coverage[toml]", "pytest", "pytest-cov", "pytest-mock"] + [[package]] name = "jinja2" version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -812,6 +952,8 @@ version = "1.5.6" description = "Implementation of JOSE Web standards" optional = false python-versions = ">= 3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, @@ -827,6 +969,8 @@ version = "1.4.8" description = "A fast implementation of the Cassowary constraint solver" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, @@ -916,6 +1060,8 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -940,6 +1086,8 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1010,6 +1158,8 @@ version = "3.10.0" description = "Python plotting package" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "matplotlib-3.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2c5829a5a1dd5a71f0e31e6e8bb449bc0ee9dbfb05ad28fc0c6b55101b3a4be6"}, {file = "matplotlib-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2a43cbefe22d653ab34bb55d42384ed30f611bcbdea1f8d7f431011a2e1c62e"}, @@ -1067,6 +1217,8 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1078,6 +1230,8 @@ version = "1.1.0" 
description = "MessagePack serializer" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, @@ -1147,13 +1301,15 @@ files = [ [[package]] name = "narwhals" -version = "1.24.1" +version = "1.24.2" description = "Extremely lightweight compatibility layer between dataframe libraries" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ - {file = "narwhals-1.24.1-py3-none-any.whl", hash = "sha256:d8983fe14851c95d60576ddca37c094bd4ed24ab9ea98396844fb20ad9aaf184"}, - {file = "narwhals-1.24.1.tar.gz", hash = "sha256:b09b8253d945f23cdb683a84685abf3afb9f96114d89e9f35dc876e143f65007"}, + {file = "narwhals-1.24.2-py3-none-any.whl", hash = "sha256:5d70dda4c032c86d0bda3f4d60a9d3f8dbd7d83deb8d23d68dd2f2eb3bc86417"}, + {file = "narwhals-1.24.2.tar.gz", hash = "sha256:c7bbb96ec8efb22ee384872e4a57cff591df653dc49067c314f88e2a54d8a76b"}, ] [package.extras] @@ -1177,6 +1333,8 @@ version = "2.2.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "numpy-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7079129b64cb78bdc8d611d1fd7e8002c0a2565da6a47c4df8062349fee90e3e"}, {file = "numpy-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec6c689c61df613b783aeb21f945c4cbe6c51c28cb70aae8430577ab39f163e"}, @@ -1237,13 +1395,15 @@ files = [ [[package]] name = "nvidia-ml-py" -version = "12.560.30" +version = 
"12.570.86" description = "Python Bindings for the NVIDIA Management Library" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "nvidia-ml-py-12.560.30.tar.gz", hash = "sha256:f0254dc7400647680a072ee02509bfd46102b60bdfeca321576d4d4817e7fe97"}, - {file = "nvidia_ml_py-12.560.30-py3-none-any.whl", hash = "sha256:fea371c94d63e38a611c17bbb85fe400e9c8ddb9e8684a9cd0e47786a4bc3c73"}, + {file = "nvidia_ml_py-12.570.86-py3-none-any.whl", hash = "sha256:58907de35a845abd13dcb227f18298f3b5dd94a72d04c9e594e77711e95c0b51"}, + {file = "nvidia_ml_py-12.570.86.tar.gz", hash = "sha256:0508d4a0c7b6d015cf574530b95a62ed4fc89da3b8b47e1aefe6777db170ec8b"}, ] [[package]] @@ -1252,10 +1412,12 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] +markers = {main = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "pandas" @@ -1263,6 +1425,8 @@ version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -1349,6 +1513,8 @@ version = "11.1.0" description = 
"Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -1437,6 +1603,8 @@ version = "6.0.0" description = "An open-source, interactive data visualization library for Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "plotly-6.0.0-py3-none-any.whl", hash = "sha256:f708871c3a9349a68791ff943a5781b1ec04de7769ea69068adcd9202e57653a"}, {file = "plotly-6.0.0.tar.gz", hash = "sha256:c4aad38b8c3d65e4a5e7dd308b084143b9025c2cc9d5317fc1f1d30958db87d3"}, @@ -1455,6 +1623,8 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1470,6 +1640,8 @@ version = "0.21.1" description = "Python client for the Prometheus monitoring system." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, @@ -1484,6 +1656,8 @@ version = "3.0.50" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, @@ -1498,6 +1672,8 @@ version = "6.1.1" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, @@ -1522,12 +1698,27 @@ files = [ dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + [[package]] name = "py-cpuinfo" version = "9.0.0" description = "Get CPU info with pure Python" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, @@ -1539,6 +1730,8 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version 
<= \"3.11\" and platform_python_implementation != \"PyPy\" or python_version >= \"3.12\" and platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -1550,6 +1743,8 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -1570,6 +1765,8 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1682,6 +1879,8 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -1696,6 +1895,8 @@ version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -1713,6 +1914,8 @@ version = "12.0.0" description = "Python utilities for the NVIDIA Management Library" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pynvml-12.0.0-py3-none-any.whl", hash = "sha256:fdff84b62a27dbe98e08e1a647eb77342bef1aebe0878bcd15e99a83fcbecb9e"}, {file = "pynvml-12.0.0.tar.gz", hash = "sha256:299ce2451a6a17e6822d6faee750103e25b415f06f59abb8db65d30f794166f5"}, @@ -1730,6 +1933,8 @@ version = "3.2.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, @@ -1744,6 +1949,8 @@ version = "3.5.4" description = "A python implementation of 
GNU readline." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" and sys_platform == \"win32\" or python_version >= \"3.12\" and sys_platform == \"win32\"" files = [ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, @@ -1758,6 +1965,8 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -1780,6 +1989,8 @@ version = "6.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, @@ -1798,6 +2009,8 @@ version = "0.6.0" description = "Manage dependencies of tests" optional = false python-versions = ">=3.4" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-dependency-0.6.0.tar.gz", hash = "sha256:934b0e6a39d95995062c193f7eaeed8a8ffa06ff1bcef4b62b0dc74a708bacc1"}, ] @@ -1812,6 +2025,8 @@ version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, @@ -1829,6 +2044,8 @@ version = "1.0.0" description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
optional = false python-versions = "*" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a"}, {file = "pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd"}, @@ -1848,6 +2065,8 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -1868,6 +2087,8 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1878,13 +2099,15 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2024.2" +version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = 
"sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] @@ -1893,6 +2116,8 @@ version = "2.1.0" description = "Python library to build pretty command line user prompts ⭐️" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec"}, {file = "questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587"}, @@ -1907,6 +2132,8 @@ version = "0.2.1" description = "Generate random adj-noun names like docker and github." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "randomname-0.2.1.tar.gz", hash = "sha256:b79b98302ba4479164b0a4f87995b7bebbd1d91012aeda483341e3e58ace520e"}, ] @@ -1916,99 +2143,101 @@ fire = "*" [[package]] name = "rapidfuzz" -version = "3.11.0" +version = "3.12.1" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.9" -files = [ - {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb8a54543d16ab1b69e2c5ed96cabbff16db044a50eddfc028000138ca9ddf33"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:231c8b2efbd7f8d2ecd1ae900363ba168b8870644bb8f2b5aa96e4a7573bde19"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54e7f442fb9cca81e9df32333fb075ef729052bcabe05b0afc0441f462299114"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:906f1f2a1b91c06599b3dd1be207449c5d4fc7bd1e1fa2f6aef161ea6223f165"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed59044aea9eb6c663112170f2399b040d5d7b162828b141f2673e822093fa8"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cb1965a28b0fa64abdee130c788a0bc0bb3cf9ef7e3a70bf055c086c14a3d7e"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b488b244931d0291412917e6e46ee9f6a14376625e150056fe7c4426ef28225"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f0ba13557fec9d5ffc0a22826754a7457cc77f1b25145be10b7bb1d143ce84c6"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3871fa7dfcef00bad3c7e8ae8d8fd58089bad6fb21f608d2bf42832267ca9663"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:b2669eafee38c5884a6e7cc9769d25c19428549dcdf57de8541cf9e82822e7db"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ffa1bb0e26297b0f22881b219ffc82a33a3c84ce6174a9d69406239b14575bd5"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:45b15b8a118856ac9caac6877f70f38b8a0d310475d50bc814698659eabc1cdb"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-win32.whl", hash = "sha256:22033677982b9c4c49676f215b794b0404073f8974f98739cb7234e4a9ade9ad"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:be15496e7244361ff0efcd86e52559bacda9cd975eccf19426a0025f9547c792"}, - {file = "rapidfuzz-3.11.0-cp310-cp310-win_arm64.whl", hash = "sha256:714a7ba31ba46b64d30fccfe95f8013ea41a2e6237ba11a805a27cdd3bce2573"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8724a978f8af7059c5323d523870bf272a097478e1471295511cf58b2642ff83"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:8b63cb1f2eb371ef20fb155e95efd96e060147bdd4ab9fc400c97325dfee9fe1"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82497f244aac10b20710448645f347d862364cc4f7d8b9ba14bd66b5ce4dec18"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:339607394941801e6e3f6c1ecd413a36e18454e7136ed1161388de674f47f9d9"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84819390a36d6166cec706b9d8f0941f115f700b7faecab5a7e22fc367408bc3"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eea8d9e20632d68f653455265b18c35f90965e26f30d4d92f831899d6682149b"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b659e1e2ea2784a9a397075a7fc395bfa4fe66424042161c4bcaf6e4f637b38"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1315cd2a351144572e31fe3df68340d4b83ddec0af8b2e207cd32930c6acd037"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a7743cca45b4684c54407e8638f6d07b910d8d811347b9d42ff21262c7c23245"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5bb636b0150daa6d3331b738f7c0f8b25eadc47f04a40e5c23c4bfb4c4e20ae3"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:42f4dd264ada7a9aa0805ea0da776dc063533917773cf2df5217f14eb4429eae"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51f24cb39e64256221e6952f22545b8ce21cacd59c0d3e367225da8fc4b868d8"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-win32.whl", hash = "sha256:aaf391fb6715866bc14681c76dc0308f46877f7c06f61d62cc993b79fc3c4a2a"}, - {file = "rapidfuzz-3.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:ebadd5b8624d8ad503e505a99b8eb26fe3ea9f8e9c2234e805a27b269e585842"}, - {file = 
"rapidfuzz-3.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:d895998fec712544c13cfe833890e0226585cf0391dd3948412441d5d68a2b8c"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f382fec4a7891d66fb7163c90754454030bb9200a13f82ee7860b6359f3f2fa8"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dfaefe08af2a928e72344c800dcbaf6508e86a4ed481e28355e8d4b6a6a5230e"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92ebb7c12f682b5906ed98429f48a3dd80dd0f9721de30c97a01473d1a346576"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a1b3ebc62d4bcdfdeba110944a25ab40916d5383c5e57e7c4a8dc0b6c17211a"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c6d7fea39cb33e71de86397d38bf7ff1a6273e40367f31d05761662ffda49e4"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99aebef8268f2bc0b445b5640fd3312e080bd17efd3fbae4486b20ac00466308"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4469307f464ae3089acf3210b8fc279110d26d10f79e576f385a98f4429f7d97"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:eb97c53112b593f89a90b4f6218635a9d1eea1d7f9521a3b7d24864228bbc0aa"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ef8937dae823b889c0273dfa0f0f6c46a3658ac0d851349c464d1b00e7ff4252"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d95f9e9f3777b96241d8a00d6377cc9c716981d828b5091082d0fe3a2924b43e"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:b1d67d67f89e4e013a5295e7523bc34a7a96f2dba5dd812c7c8cb65d113cbf28"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:d994cf27e2f874069884d9bddf0864f9b90ad201fcc9cb2f5b82bacc17c8d5f2"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-win32.whl", hash = "sha256:ba26d87fe7fcb56c4a53b549a9e0e9143f6b0df56d35fe6ad800c902447acd5b"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:b1f7efdd7b7adb32102c2fa481ad6f11923e2deb191f651274be559d56fc913b"}, - {file = "rapidfuzz-3.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:ed78c8e94f57b44292c1a0350f580e18d3a3c5c0800e253f1583580c1b417ad2"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e60814edd0c9b511b5f377d48b9782b88cfe8be07a98f99973669299c8bb318a"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3f28952da055dbfe75828891cd3c9abf0984edc8640573c18b48c14c68ca5e06"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e8f93bc736020351a6f8e71666e1f486bb8bd5ce8112c443a30c77bfde0eb68"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76a4a11ba8f678c9e5876a7d465ab86def047a4fcc043617578368755d63a1bc"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc0e0d41ad8a056a9886bac91ff9d9978e54a244deb61c2972cc76b66752de9c"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e8ea35f2419c7d56b3e75fbde2698766daedb374f20eea28ac9b1f668ef4f74"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd340bbd025302276b5aa221dccfe43040c7babfc32f107c36ad783f2ffd8775"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:494eef2c68305ab75139034ea25328a04a548d297712d9cf887bf27c158c388b"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5a167344c1d6db06915fb0225592afdc24d8bafaaf02de07d4788ddd37f4bc2f"}, - {file = 
"rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8c7af25bda96ac799378ac8aba54a8ece732835c7b74cfc201b688a87ed11152"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d2a0f7e17f33e7890257367a1662b05fecaf56625f7dbb6446227aaa2b86448b"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4d0d26c7172bdb64f86ee0765c5b26ea1dc45c52389175888ec073b9b28f4305"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-win32.whl", hash = "sha256:6ad02bab756751c90fa27f3069d7b12146613061341459abf55f8190d899649f"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:b1472986fd9c5d318399a01a0881f4a0bf4950264131bb8e2deba9df6d8c362b"}, - {file = "rapidfuzz-3.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:c408f09649cbff8da76f8d3ad878b64ba7f7abdad1471efb293d2c075e80c822"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1bac4873f6186f5233b0084b266bfb459e997f4c21fc9f029918f44a9eccd304"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f9f12c2d0aa52b86206d2059916153876a9b1cf9dfb3cf2f344913167f1c3d4"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd501de6f7a8f83557d20613b58734d1cb5f0be78d794cde64fe43cfc63f5f2"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4416ca69af933d4a8ad30910149d3db6d084781d5c5fdedb713205389f535385"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f0821b9bdf18c5b7d51722b906b233a39b17f602501a966cfbd9b285f8ab83cd"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0edecc3f90c2653298d380f6ea73b536944b767520c2179ec5d40b9145e47aa"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4513dd01cee11e354c31b75f652d4d466c9440b6859f84e600bdebfccb17735a"}, - {file 
= "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d9727b85511b912571a76ce53c7640ba2c44c364e71cef6d7359b5412739c570"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ab9eab33ee3213f7751dc07a1a61b8d9a3d748ca4458fffddd9defa6f0493c16"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6b01c1ddbb054283797967ddc5433d5c108d680e8fa2684cf368be05407b07e4"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3857e335f97058c4b46fa39ca831290b70de554a5c5af0323d2f163b19c5f2a6"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d98a46cf07c0c875d27e8a7ed50f304d83063e49b9ab63f21c19c154b4c0d08d"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-win32.whl", hash = "sha256:c36539ed2c0173b053dafb221458812e178cfa3224ade0960599bec194637048"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:ec8d7d8567e14af34a7911c98f5ac74a3d4a743cd848643341fc92b12b3784ff"}, - {file = "rapidfuzz-3.11.0-cp39-cp39-win_arm64.whl", hash = "sha256:62171b270ecc4071be1c1f99960317db261d4c8c83c169e7f8ad119211fe7397"}, - {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f06e3c4c0a8badfc4910b9fd15beb1ad8f3b8fafa8ea82c023e5e607b66a78e4"}, - {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fe7aaf5a54821d340d21412f7f6e6272a9b17a0cbafc1d68f77f2fc11009dcd5"}, - {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25398d9ac7294e99876a3027ffc52c6bebeb2d702b1895af6ae9c541ee676702"}, - {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a52eea839e4bdc72c5e60a444d26004da00bb5bc6301e99b3dde18212e41465"}, - {file = "rapidfuzz-3.11.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c87319b0ab9d269ab84f6453601fd49b35d9e4a601bbaef43743f26fabf496c"}, - {file = 
"rapidfuzz-3.11.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3048c6ed29d693fba7d2a7caf165f5e0bb2b9743a0989012a98a47b975355cca"}, - {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b04f29735bad9f06bb731c214f27253bd8bedb248ef9b8a1b4c5bde65b838454"}, - {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7864e80a0d4e23eb6194254a81ee1216abdc53f9dc85b7f4d56668eced022eb8"}, - {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3794df87313dfb56fafd679b962e0613c88a293fd9bd5dd5c2793d66bf06a101"}, - {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d71da0012face6f45432a11bc59af19e62fac5a41f8ce489e80c0add8153c3d1"}, - {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff38378346b7018f42cbc1f6d1d3778e36e16d8595f79a312b31e7c25c50bd08"}, - {file = "rapidfuzz-3.11.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6668321f90aa02a5a789d4e16058f2e4f2692c5230252425c3532a8a62bc3424"}, - {file = "rapidfuzz-3.11.0.tar.gz", hash = "sha256:a53ca4d3f52f00b393fab9b5913c5bafb9afc27d030c8a1db1283da6917a860f"}, +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "rapidfuzz-3.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbb7ea2fd786e6d66f225ef6eef1728832314f47e82fee877cb2a793ebda9579"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ae41361de05762c1eaa3955e5355de7c4c6f30d1ef1ea23d29bf738a35809ab"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc3c39e0317e7f68ba01bac056e210dd13c7a0abf823e7b6a5fe7e451ddfc496"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69f2520296f1ae1165b724a3aad28c56fd0ac7dd2e4cff101a5d986e840f02d4"}, + {file = 
"rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34dcbf5a7daecebc242f72e2500665f0bde9dd11b779246c6d64d106a7d57c99"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:773ab37fccf6e0513891f8eb4393961ddd1053c6eb7e62eaa876e94668fc6d31"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ecf0e6de84c0bc2c0f48bc03ba23cef2c5f1245db7b26bc860c11c6fd7a097c"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4dc2ebad4adb29d84a661f6a42494df48ad2b72993ff43fad2b9794804f91e45"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8389d98b9f54cb4f8a95f1fa34bf0ceee639e919807bb931ca479c7a5f2930bf"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:165bcdecbfed9978962da1d3ec9c191b2ff9f1ccc2668fbaf0613a975b9aa326"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:129d536740ab0048c1a06ccff73c683f282a2347c68069affae8dbc423a37c50"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b67e390261ffe98ec86c771b89425a78b60ccb610c3b5874660216fcdbded4b"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-win32.whl", hash = "sha256:a66520180d3426b9dc2f8d312f38e19bc1fc5601f374bae5c916f53fa3534a7d"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:82260b20bc7a76556cecb0c063c87dad19246a570425d38f8107b8404ca3ac97"}, + {file = "rapidfuzz-3.12.1-cp310-cp310-win_arm64.whl", hash = "sha256:3a860d103bbb25c69c2e995fdf4fac8cb9f77fb69ec0a00469d7fd87ff148f46"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6d9afad7b16d01c9e8929b6a205a18163c7e61b6cd9bcf9c81be77d5afc1067a"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb424ae7240f2d2f7d8dda66a61ebf603f74d92f109452c63b0dbf400204a437"}, + {file = 
"rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42149e6d13bd6d06437d2a954dae2184dadbbdec0fdb82dafe92860d99f80519"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:760ac95d788f2964b73da01e0bdffbe1bf2ad8273d0437565ce9092ae6ad1fbc"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cf27e8e4bf7bf9d92ef04f3d2b769e91c3f30ba99208c29f5b41e77271a2614"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00ceb8ff3c44ab0d6014106c71709c85dee9feedd6890eff77c814aa3798952b"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b61c558574fbc093d85940c3264c08c2b857b8916f8e8f222e7b86b0bb7d12"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:346a2d8f17224e99f9ef988606c83d809d5917d17ad00207237e0965e54f9730"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d60d1db1b7e470e71ae096b6456e20ec56b52bde6198e2dbbc5e6769fa6797dc"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2477da227e266f9c712f11393182c69a99d3c8007ea27f68c5afc3faf401cc43"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8499c7d963ddea8adb6cffac2861ee39a1053e22ca8a5ee9de1197f8dc0275a5"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:12802e5c4d8ae104fb6efeeb436098325ce0dca33b461c46e8df015c84fbef26"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-win32.whl", hash = "sha256:e1061311d07e7cdcffa92c9b50c2ab4192907e70ca01b2e8e1c0b6b4495faa37"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6e4ed63e204daa863a802eec09feea5448617981ba5d150f843ad8e3ae071a4"}, + {file = "rapidfuzz-3.12.1-cp311-cp311-win_arm64.whl", hash = 
"sha256:920733a28c3af47870835d59ca9879579f66238f10de91d2b4b3f809d1ebfc5b"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f6235b57ae3faa3f85cb3f90c9fee49b21bd671b76e90fc99e8ca2bdf0b5e4a3"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af4585e5812632c357fee5ab781c29f00cd06bea58f8882ff244cc4906ba6c9e"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5942dc4460e5030c5f9e1d4c9383de2f3564a2503fe25e13e89021bcbfea2f44"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b31ab59e1a0df5afc21f3109b6cfd77b34040dbf54f1bad3989f885cfae1e60"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97c885a7a480b21164f57a706418c9bbc9a496ec6da087e554424358cadde445"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d844c0587d969ce36fbf4b7cbf0860380ffeafc9ac5e17a7cbe8abf528d07bb"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93c95dce8917bf428064c64024de43ffd34ec5949dd4425780c72bd41f9d969"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:834f6113d538af358f39296604a1953e55f8eeffc20cb4caf82250edbb8bf679"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a940aa71a7f37d7f0daac186066bf6668d4d3b7e7ef464cb50bc7ba89eae1f51"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ec9eaf73501c9a7de2c6938cb3050392e2ee0c5ca3921482acf01476b85a7226"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3c5ec360694ac14bfaeb6aea95737cf1a6cf805b5fe8ea7fd28814706c7fa838"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6b5e176524653ac46f1802bdd273a4b44a5f8d0054ed5013a8e8a4b72f254599"}, + {file = 
"rapidfuzz-3.12.1-cp312-cp312-win32.whl", hash = "sha256:6f463c6f1c42ec90e45d12a6379e18eddd5cdf74138804d8215619b6f4d31cea"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:b894fa2b30cd6498a29e5c470cb01c6ea898540b7e048a0342775a5000531334"}, + {file = "rapidfuzz-3.12.1-cp312-cp312-win_arm64.whl", hash = "sha256:43bb17056c5d1332f517b888c4e57846c4b5f936ed304917eeb5c9ac85d940d4"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:97f824c15bc6933a31d6e3cbfa90188ba0e5043cf2b6dd342c2b90ee8b3fd47c"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a973b3f5cabf931029a3ae4a0f72e3222e53d412ea85fc37ddc49e1774f00fbf"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df7880e012228722dec1be02b9ef3898ed023388b8a24d6fa8213d7581932510"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c78582f50e75e6c2bc38c791ed291cb89cf26a3148c47860c1a04d6e5379c8e"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d7d9e6a04d8344b0198c96394c28874086888d0a2b2f605f30d1b27b9377b7d"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5620001fd4d6644a2f56880388179cc8f3767670f0670160fcb97c3b46c828af"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0666ab4c52e500af7ba5cc17389f5d15c0cdad06412c80312088519fdc25686d"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:27b4d440fa50b50c515a91a01ee17e8ede719dca06eef4c0cccf1a111a4cfad3"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83dccfd5a754f2a0e8555b23dde31f0f7920601bfa807aa76829391ea81e7c67"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:b572b634740e047c53743ed27a1bb3b4f93cf4abbac258cd7af377b2c4a9ba5b"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7fa7b81fb52902d5f78dac42b3d6c835a6633b01ddf9b202a3ca8443be4b2d6a"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1d4fbff980cb6baef4ee675963c081f7b5d6580a105d6a4962b20f1f880e1fb"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-win32.whl", hash = "sha256:3fe8da12ea77271097b303fa7624cfaf5afd90261002314e3b0047d36f4afd8d"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:6f7e92fc7d2a7f02e1e01fe4f539324dfab80f27cb70a30dd63a95445566946b"}, + {file = "rapidfuzz-3.12.1-cp313-cp313-win_arm64.whl", hash = "sha256:e31be53d7f4905a6a038296d8b773a79da9ee9f0cd19af9490c5c5a22e37d2e5"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bef5c91d5db776523530073cda5b2a276283258d2f86764be4a008c83caf7acd"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:841e0c2a5fbe8fc8b9b1a56e924c871899932c0ece7fbd970aa1c32bfd12d4bf"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046fc67f3885d94693a2151dd913aaf08b10931639cbb953dfeef3151cb1027c"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4d2d39b2e76c17f92edd6d384dc21fa020871c73251cdfa017149358937a41d"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5857dda85165b986c26a474b22907db6b93932c99397c818bcdec96340a76d5"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c26cd1b9969ea70dbf0dbda3d2b54ab4b2e683d0fd0f17282169a19563efeb1"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf56ea4edd69005786e6c80a9049d95003aeb5798803e7a2906194e7a3cb6472"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:fbe7580b5fb2db8ebd53819171ff671124237a55ada3f64d20fc9a149d133960"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:018506a53c3b20dcbda8c93d4484b9eb1764c93d5ea16be103cf6b0d8b11d860"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:325c9c71b737fcd32e2a4e634c430c07dd3d374cfe134eded3fe46e4c6f9bf5d"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:930756639643e3aa02d3136b6fec74e5b9370a24f8796e1065cd8a857a6a6c50"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0acbd27543b158cb915fde03877383816a9e83257832818f1e803bac9b394900"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-win32.whl", hash = "sha256:80ff9283c54d7d29b2d954181e137deee89bec62f4a54675d8b6dbb6b15d3e03"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:fd37e53f0ed239d0cec27b250cec958982a8ba252ce64aa5e6052de3a82fa8db"}, + {file = "rapidfuzz-3.12.1-cp39-cp39-win_arm64.whl", hash = "sha256:4a4422e4f73a579755ab60abccb3ff148b5c224b3c7454a13ca217dfbad54da6"}, + {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b7cba636c32a6fc3a402d1cb2c70c6c9f8e6319380aaf15559db09d868a23e56"}, + {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b79286738a43e8df8420c4b30a92712dec6247430b130f8e015c3a78b6d61ac2"}, + {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dc1937198e7ff67e217e60bfa339f05da268d91bb15fec710452d11fe2fdf60"}, + {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b85817a57cf8db32dd5d2d66ccfba656d299b09eaf86234295f89f91be1a0db2"}, + {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04283c6f3e79f13a784f844cd5b1df4f518ad0f70c789aea733d106c26e1b4fb"}, + {file = "rapidfuzz-3.12.1-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:a718f740553aad5f4daef790191511da9c6eae893ee1fc2677627e4b624ae2db"}, + {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cbdf145c7e4ebf2e81c794ed7a582c4acad19e886d5ad6676086369bd6760753"}, + {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0d03ad14a26a477be221fddc002954ae68a9e2402b9d85433f2d0a6af01aa2bb"}, + {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1187aeae9c89e838d2a0a2b954b4052e4897e5f62e5794ef42527bf039d469e"}, + {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd47dfb1bca9673a48b923b3d988b7668ee8efd0562027f58b0f2b7abf27144c"}, + {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187cdb402e223264eebed2fe671e367e636a499a7a9c82090b8d4b75aa416c2a"}, + {file = "rapidfuzz-3.12.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6899b41bf6c30282179f77096c1939f1454836440a8ab05b48ebf7026a3b590"}, + {file = "rapidfuzz-3.12.1.tar.gz", hash = "sha256:6a98bbca18b4a37adddf2d8201856441c26e9c981d8895491b5bc857b5f780eb"}, ] [package.extras] @@ -2020,6 +2249,8 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -2041,6 +2272,8 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -2060,6 +2293,8 @@ version = "0.9.4" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ruff-0.9.4-py3-none-linux_armv6l.whl", hash = "sha256:64e73d25b954f71ff100bb70f39f1ee09e880728efb4250c632ceed4e4cdf706"}, {file = "ruff-0.9.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6ce6743ed64d9afab4fafeaea70d3631b4d4b28b592db21a5c2d1f0ef52934bf"}, @@ -2087,6 +2322,8 @@ version = "3.0.4" description = "Python helper for Semantic Versioning (https://semver.org)" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"}, {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"}, @@ -2098,6 +2335,8 @@ version = "75.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, @@ -2118,6 +2357,8 @@ version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -2129,6 +2370,8 @@ version = "1.17.0" description = "Python 2 and 3 compatibility 
utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -2140,6 +2383,8 @@ version = "5.0.2" description = "A pure Python implementation of a sliding window memory map manager" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e"}, {file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"}, @@ -2151,6 +2396,8 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -2162,6 +2409,8 @@ version = "0.9.0" description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -2176,6 +2425,8 @@ version = "9.0.0" description = "Retry code until it succeeds" optional = false 
python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -2191,6 +2442,8 @@ version = "2.5.0" description = "ANSI color formatting for output in terminal" optional = false python-versions = ">=3.9" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8"}, {file = "termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f"}, @@ -2205,6 +2458,8 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -2216,6 +2471,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_full_version <= \"3.11.0a6\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -2257,6 +2514,8 @@ version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, @@ -2274,17 +2533,37 @@ version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, ] +[[package]] +name = "types-requests" +version = "2.32.0.20241016" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = 
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -2292,13 +2571,15 @@ files = [ [[package]] name = "tzdata" -version = "2024.2" +version = "2025.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, ] [[package]] @@ -2307,6 +2588,8 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -2324,6 +2607,8 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -2335,6 +2620,8 @@ version = "3.0.1" description = "Yet Another Terminal Spinner" optional = false python-versions = ">=3.9,<4.0" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "yaspin-3.0.1-py3-none-any.whl", hash = "sha256:c4b5d2ca23ae664b87a5cd53401c5107cef12668a71d9ee5ea5536045f364121"}, {file = "yaspin-3.0.1.tar.gz", hash = "sha256:9c04aa69cce9be83e1ea3134a6712e749e6c0c9cd02599023713e6befd7bf369"}, @@ -2347,6 +2634,6 @@ termcolor = ">=2.3,<3.0" plot = ["matplotlib", "plotly"] [metadata] -lock-version = "2.0" -python-versions = "^3.10,<3.14" -content-hash = "4e8388beb2a64a222deb530d95f56797f14aa2f79024791b5e32f4fb858b8470" +lock-version = "2.1" +python-versions = ">=3.10,<3.14" +content-hash = "b87307deab6d125136242de2adc36049337970d6abea23392c9fdf57761230a6" diff --git a/pyproject.toml b/pyproject.toml index 477e9e80..bbbfd633 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,13 @@ -[tool.poetry] +[project] name = "simvue" -version = "1.1.4" +version = "2.0.0a0" description = "Simulation tracking and monitoring" 
-authors = ["Simvue Development Team "] +authors = [ + {name = "Simvue Development Team", email = "info@simvue.io"} +] license = "Apache v2" +requires-python = ">=3.10,<3.14" readme = "README.md" -homepage = "https://simvue.io" -repository = "https://github.com/simvue-io/python-api" -documentation = "https://docs.simvue.io" classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", @@ -33,34 +33,37 @@ keywords = [ "alerting", "metrics-gathering" ] +dependencies = [ + "requests (>=2.32.3,<3.0.0)", + "pydantic (>=2.10.6,<3.0.0)", + "tabulate (>=0.9.0,<0.10.0)", + "msgpack (>=1.1.0,<2.0.0)", + "pyjwt (>=2.10.1,<3.0.0)", + "pandas (>=2.2.3,<3.0.0)", + "toml (>=0.10.2,<0.11.0)", + "click (>=8.1.8,<9.0.0)", + "gitpython (>=3.1.44,<4.0.0)", + "humanfriendly (>=10.0,<11.0)", + "randomname (>=0.2.1,<0.3.0)", + "codecarbon (>=2.8.3,<3.0.0)", + "numpy (>=2.2.2,<3.0.0)", + "flatdict (>=4.0.1,<5.0.0)", + "semver (>=3.0.4,<4.0.0)", + "email-validator (>=2.2.0,<3.0.0)", + "psutil (>=6.1.1,<7.0.0)", + "tenacity (>=9.0.0,<10.0.0)", + "typing-extensions (>=4.12.2,<5.0.0) ; python_version < \"3.11\"", +] -[tool.poetry.dependencies] -python = "^3.10,<3.14" -dill = "^0.3.7" -requests = "^2.31.0" -msgpack = "^1.0.7" -tenacity = ">=8.2.3,<10.0.0" -PyJWT = "^2.8.0" -psutil = ">=5.9.8,<7.0.0" -pydantic = "^2.5.3" -pandas = "^2.2.0" -plotly = {version = ">=5.18,<7.0", optional = true} -matplotlib = {version = "^3.8.2", optional = true} -typing_extensions = { version = "^4.11.0", python = "<3.10" } -toml = "^0.10.2" -click = "^8.1.7" -gitpython = "^3.1.43" -humanfriendly = "^10.0" -tabulate = "^0.9.0" -randomname = "^0.2.1" -codecarbon = "^2.7.1" -numpy = "^2.1.2" -semver = "^3.0.2" +[project.urls] +homepage = "https://simvue.io" +repository = "https://github.com/simvue-io/python-api" +documentation = "https://docs.simvue.io" -[tool.poetry.extras] -plot = ["matplotlib", "plotly"] +[project.optional-dependencies] +plot = ["plotly (>=6.0.0,<7.0.0)", 
"matplotlib (>=3.10.0,<4.0.0)"] -[tool.poetry.scripts] +[project.scripts] simvue_sender = "simvue.bin.sender:run" [tool.poetry.group.dev.dependencies] @@ -72,16 +75,13 @@ pytest-mock = "^3.14.0" pytest-sugar = "^1.0.0" pytest-xdist = "^3.6.1" jinja2 = "^3.1.4" +types-requests = "^2.32.0.20241016" +interrogate = "^1.7.0" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" -[[tool.poetry.source]] -name = "PyPI" -priority = "primary" - - [tool.ruff] lint.extend-select = ["C901", "T201"] lint.mccabe.max-complexity = 11 @@ -93,6 +93,7 @@ testpaths = [ "tests" ] markers = [ + "codecarbon: tests for emission metrics", "client: tests of Simvue client", "converters: tests for Simvue object converters", "dispatch: test data dispatcher", @@ -104,7 +105,10 @@ markers = [ "api: tests of RestAPI functionality", "unix: tests for UNIX systems only", "metadata: tests of metadata gathering functions", - "proxies: tests for remote/offline Simvue proxies" + "proxies: tests for remote/offline Simvue proxies", + "online: tests for online functionality", + "offline: tests for offline functionality", + "local: tests of functionality which do not involve a server or writing to an offline cache file" ] [tool.interrogate] @@ -112,3 +116,6 @@ ignore-init-method = true fail-under = 95 verbose = 1 exclude = ["docs", "tests"] + +[tool.mypy] +ignore_missing_imports = true diff --git a/simvue/api/__init__.py b/simvue/api/__init__.py new file mode 100644 index 00000000..f56eb001 --- /dev/null +++ b/simvue/api/__init__.py @@ -0,0 +1,8 @@ +""" +Simvue API +========== + +Module contains methods for interacting with a Simvue server +including accessing/updating objects. 
+ +""" diff --git a/simvue/api/objects/__init__.py b/simvue/api/objects/__init__.py new file mode 100644 index 00000000..36950de4 --- /dev/null +++ b/simvue/api/objects/__init__.py @@ -0,0 +1,35 @@ +""" +Simvue API Objects +================== + +The following module defines objects which provide exact representations +of information accessible via the Simvue RestAPI, this provides a lower +level interface towards the development of additional tools/frameworks. + +""" + +from .administrator import Tenant as Tenant, User as User +from .alert import ( + Alert as Alert, + EventsAlert as EventsAlert, + MetricsThresholdAlert as MetricsThresholdAlert, + MetricsRangeAlert as MetricsRangeAlert, + UserAlert as UserAlert, +) +from .storage import ( + S3Storage as S3Storage, + FileStorage as FileStorage, + Storage as Storage, +) +from .artifact import ( + FileArtifact as FileArtifact, + ObjectArtifact as ObjectArtifact, + Artifact as Artifact, +) + +from .stats import Stats as Stats +from .run import Run as Run +from .tag import Tag as Tag +from .folder import Folder as Folder, get_folder_from_path as get_folder_from_path +from .events import Events as Events +from .metrics import Metrics as Metrics diff --git a/simvue/api/objects/administrator/__init__.py b/simvue/api/objects/administrator/__init__.py new file mode 100644 index 00000000..315fc0c2 --- /dev/null +++ b/simvue/api/objects/administrator/__init__.py @@ -0,0 +1,16 @@ +""" +Simvue Admin Objects +==================== + +These are Simvue objects only accessible to an administrator of +the server. 
+ +""" + +from .tenant import Tenant as Tenant +from .user import User as User + +__all__ = [ + "Tenant", + "User", +] diff --git a/simvue/api/objects/administrator/tenant.py b/simvue/api/objects/administrator/tenant.py new file mode 100644 index 00000000..88dd3a14 --- /dev/null +++ b/simvue/api/objects/administrator/tenant.py @@ -0,0 +1,131 @@ +""" +Simvue Tenants +============== + +Contains a class for remotely connecting to Simvue tenants, or defining +a new tenant given relevant arguments. + +""" + +try: + from typing import Self +except ImportError: + from typing_extensions import Self +import pydantic + +from simvue.api.objects.base import write_only, SimvueObject, staging_check + + +class Tenant(SimvueObject): + """Class for interacting with a tenant instance on the server.""" + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: str, + is_enabled: bool = True, + max_request_rate: int = 0, + max_runs: int = 0, + max_data_volume: int = 0, + offline: bool = False, + ) -> Self: + """Create a new tenant on the Simvue server. + + Requires administrator privileges. + + Parameters + ---------- + name: str + the name for this tenant + is_enabled: bool, optional + whether to enable the tenant on creation, default is True + max_request_rate: int, optional + the maximum request rate allowed for this tenant, default is no limit. + max_runs: int, optional + the maximum number of runs allowed within this tenant, default is no limit. + max_data_volume: int, optional + the maximum volume of data allowed within this tenant, default is no limit. + offline: bool, optional + create in offline mode, default is False. 
+ + Returns + ------- + Tenant + a tenant instance with staged changes + + """ + return Tenant( + name=name, + is_enabled=is_enabled, + max_request_rate=max_request_rate, + max_runs=max_runs, + max_data_volume=max_data_volume, + _read_only=False, + _offline=offline, + ) + + @property + def name(self) -> str: + """Retrieve the name of the tenant""" + return self._get_attribute("name") + + @name.setter + @write_only + @pydantic.validate_call + def name(self, name: str) -> None: + """Change name of tenant""" + self._staging["name"] = name + + @property + @staging_check + def is_enabled(self) -> bool: + """Retrieve if tenant is enabled""" + return self._get_attribute("is_enabled") + + @is_enabled.setter + @write_only + @pydantic.validate_call + def is_enabled(self, is_enabled: bool) -> None: + """Enable/disable tenant""" + self._staging["is_enabled"] = is_enabled + + @property + @staging_check + def max_request_rate(self) -> int: + """Retrieve the tenant's maximum request rate""" + return self._get_attribute("max_request_rate") + + @max_request_rate.setter + @write_only + @pydantic.validate_call + def max_request_rate(self, max_request_rate: int) -> None: + """Update tenant's maximum request rate""" + self._staging["max_request_rate"] = max_request_rate + + @property + @staging_check + def max_runs(self) -> int: + """Retrieve the tenant's maximum runs""" + return self._get_attribute("max_runs") + + @max_runs.setter + @write_only + @pydantic.validate_call + def max_runs(self, max_runs: int) -> None: + """Update tenant's maximum runs""" + self._staging["max_runs"] = max_runs + + @property + @staging_check + def max_data_volume(self) -> int: + """Retrieve the tenant's maximum data volume""" + return self._get_attribute("max_data_volume") + + @max_data_volume.setter + @write_only + @pydantic.validate_call + def max_data_volume(self, max_data_volume: int) -> None: + """Update tenant's maximum data volume""" + self._staging["max_data_volume"] = max_data_volume diff --git 
"""
Simvue Users
============

Contains a class for remotely connecting to Simvue users, or defining
a new user given relevant arguments.

"""

import pydantic

try:
    from typing import Self
except ImportError:
    from typing_extensions import Self
from simvue.api.objects.base import SimvueObject, staging_check, write_only


class User(SimvueObject):
    """Class for interacting with a user instance on the server."""

    @classmethod
    @pydantic.validate_call
    def new(
        cls,
        *,
        username: str,
        fullname: str,
        email: pydantic.EmailStr,
        is_manager: bool,
        is_admin: bool,
        is_readonly: bool,
        welcome: bool,
        tenant: str,
        enabled: bool = True,
        offline: bool = False,
        **_,
    ) -> Self:
        """Create a new user on the Simvue server.

        Requires administrator privileges.

        Parameters
        ----------
        username: str
            the username for this user
        fullname: str
            the full name for this user
        email: str
            the email for this user
        is_manager : bool
            assign the manager role to this user
        is_admin : bool
            assign the administrator role to this user
        is_readonly : bool
            given only read access to this user
        welcome : bool
            display welcome message to user
        tenant : str
            the tenant under which to assign this user
        enabled: bool, optional
            whether to enable the user on creation, default is True
        offline: bool, optional
            create in offline mode, default is False.

        Returns
        -------
        User
            a user instance with staged changes

        """
        _user_info: dict[str, str | bool] = {
            "username": username,
            "fullname": fullname,
            "email": email,
            "is_manager": is_manager,
            "is_readonly": is_readonly,
            "welcome": welcome,
            "is_admin": is_admin,
            "is_enabled": enabled,
        }
        # Construct via 'cls' so subclasses of User create instances of
        # themselves. Both 'offline' (payload) and '_offline' (object mode)
        # are passed deliberately.
        _user = cls(
            user=_user_info,
            tenant=tenant,
            offline=offline,
            _read_only=False,
            _offline=offline,
        )
        _user._staging |= _user_info
        return _user

    @classmethod
    def get(
        cls, *, count: int | None = None, offset: int | None = None, **kwargs
    ) -> dict[str, "User"]:
        """Retrieve users from the Simvue server.

        Parameters
        ----------
        count : int, optional
            limit the number of results, default is no limit.
        offset : int, optional
            start index for results, default is 0.

        Returns
        -------
        dict[str, User]
            mapping of identifier to user instance, as produced by the
            base class retrieval.
        """
        # Currently no user filters are supported server-side
        kwargs.pop("filters", None)
        return super().get(count=count, offset=offset, **kwargs)

    @property
    @staging_check
    def username(self) -> str:
        """Retrieve the username for the user"""
        # Offline objects store the payload under a nested 'user' key
        if self.id and self.id.startswith("offline_"):
            return self._get_attribute("user")["username"]
        return self._get_attribute("username")

    @username.setter
    @write_only
    @pydantic.validate_call
    def username(self, username: str) -> None:
        """Set the username for the user"""
        self._staging["username"] = username

    @property
    @staging_check
    def fullname(self) -> str:
        """Retrieve the full name for the user"""
        if self.id and self.id.startswith("offline_"):
            return self._get_attribute("user")["fullname"]
        return self._get_attribute("fullname")

    @fullname.setter
    @write_only
    @pydantic.validate_call
    def fullname(self, fullname: str) -> None:
        """Set the full name for the user"""
        self._staging["fullname"] = fullname

    @property
    @staging_check
    def is_manager(self) -> bool:
        """Retrieve if the user has manager privileges"""
        if self.id and self.id.startswith("offline_"):
            return self._get_attribute("user")["is_manager"]
        return self._get_attribute("is_manager")

    @is_manager.setter
    @write_only
    @pydantic.validate_call
    def is_manager(self, is_manager: bool) -> None:
        """Set if the user has manager privileges"""
        self._staging["is_manager"] = is_manager

    @property
    @staging_check
    def is_admin(self) -> bool:
        """Retrieve if the user has admin privileges"""
        if self.id and self.id.startswith("offline_"):
            return self._get_attribute("user")["is_admin"]
        return self._get_attribute("is_admin")

    @is_admin.setter
    @write_only
    @pydantic.validate_call
    def is_admin(self, is_admin: bool) -> None:
        """Set if the user has admin privileges"""
        self._staging["is_admin"] = is_admin

    @property
    def deleted(self) -> bool:
        """Retrieve if the user is pending deletion"""
        if self.id and self.id.startswith("offline_"):
            return self._get_attribute("user")["is_deleted"]
        return self._get_attribute("is_deleted")

    @property
    @staging_check
    def is_readonly(self) -> bool:
        """Retrieve if the user has read-only access"""
        if self.id and self.id.startswith("offline_"):
            return self._get_attribute("user")["is_readonly"]
        return self._get_attribute("is_readonly")

    @is_readonly.setter
    @write_only
    @pydantic.validate_call
    def is_readonly(self, is_readonly: bool) -> None:
        """Set if the user has read-only access"""
        self._staging["is_readonly"] = is_readonly

    @property
    @staging_check
    def enabled(self) -> bool:
        """Retrieve if the user is enabled"""
        if self.id and self.id.startswith("offline_"):
            return self._get_attribute("user")["is_enabled"]
        return self._get_attribute("is_enabled")

    @enabled.setter
    @write_only
    @pydantic.validate_call
    def enabled(self, is_enabled: bool) -> None:
        """Set if the user is enabled"""
        self._staging["is_enabled"] = is_enabled

    @property
    @staging_check
    def email(self) -> str:
        """Retrieve the user email"""
        if self.id and self.id.startswith("offline_"):
            return self._get_attribute("user")["email"]
        return self._get_attribute("email")

    @email.setter
    @write_only
    @pydantic.validate_call
    def email(self, email: str) -> None:
        """Set the user email"""
        self._staging["email"] = email
"""
Alert Object Base
=================

Contains general definitions for Simvue Alert objects.

"""

import http
import pydantic
import typing
from simvue.api.objects.base import SimvueObject, staging_check, write_only
from simvue.api.request import get as sv_get, get_json_from_response
from simvue.api.url import URL
from simvue.models import NAME_REGEX


class AlertBase(SimvueObject):
    """Class for interfacing with Simvue alerts

    Contains properties common to all alert types.
    """

    @classmethod
    def new(cls, **kwargs):
        """Create a new alert - implemented by the concrete alert classes."""
        pass

    def __init__(self, identifier: str | None = None, **kwargs) -> None:
        """Retrieve an alert from the Simvue server by identifier"""
        self._label = "alert"
        super().__init__(identifier=identifier, **kwargs)

    def compare(self, other: "AlertBase") -> bool:
        """Compare this alert to another.

        Alerts match when they are of the same concrete type and share a
        name; subclasses extend this with definition comparisons.
        """
        return type(self) is type(other) and self.name == other.name

    @staging_check
    def get_alert(self) -> dict[str, typing.Any]:
        """Retrieve the type-specific alert definition, or {} if absent."""
        try:
            return self._get_attribute("alert")
        except AttributeError:
            return {}

    @property
    def name(self) -> str:
        """Retrieve alert name"""
        return self._get_attribute("name")

    @name.setter
    @write_only
    @pydantic.validate_call
    def name(
        self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)]
    ) -> None:
        """Set alert name"""
        self._staging["name"] = name

    @property
    @staging_check
    def description(self) -> str | None:
        """Retrieve alert description"""
        return self._get_attribute("description")

    @description.setter
    @write_only
    @pydantic.validate_call
    def description(self, description: str | None) -> None:
        """Set alert description"""
        self._staging["description"] = description

    @property
    def run_tags(self) -> list[str]:
        """Retrieve automatically assigned tags from runs"""
        return self._get_attribute("run_tags")

    @property
    @staging_check
    def auto(self) -> bool:
        """Retrieve if alert has run tag auto-assign"""
        return self._get_attribute("auto")

    @auto.setter
    @write_only
    @pydantic.validate_call
    def auto(self, auto: bool) -> None:
        """Set alert to use run tag auto-assign"""
        self._staging["auto"] = auto

    @property
    @staging_check
    def notification(self) -> typing.Literal["none", "email"]:
        """Retrieve alert notification setting"""
        return self._get_attribute("notification")

    @notification.setter
    @write_only
    @pydantic.validate_call
    def notification(self, notification: typing.Literal["none", "email"]) -> None:
        """Configure alert notification setting"""
        self._staging["notification"] = notification

    @property
    def source(self) -> typing.Literal["events", "metrics", "user"]:
        """Retrieve alert source"""
        return self._get_attribute("source")

    @property
    @staging_check
    def enabled(self) -> bool:
        """Retrieve if alert is enabled"""
        return self._get_attribute("enabled")

    @enabled.setter
    @write_only
    @pydantic.validate_call
    def enabled(self, enabled: bool) -> None:
        """Enable/disable alert"""
        # Annotation fixed from 'str' to 'bool': the getter returns bool and
        # pydantic.validate_call enforces the declared parameter type.
        self._staging["enabled"] = enabled

    @property
    @staging_check
    def abort(self) -> bool:
        """Retrieve if alert can abort simulations"""
        return self._get_attribute("abort")

    @abort.setter
    @write_only
    @pydantic.validate_call
    def abort(self, abort: bool) -> None:
        """Configure alert to trigger aborts"""
        self._staging["abort"] = abort

    @pydantic.validate_call
    def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None:
        """Set the status of this alert for a given run.

        Raises
        ------
        AttributeError
            always, for alert types which do not support manual status.
        """
        raise AttributeError(
            f"Cannot update state for alert of type '{self.__class__.__name__}'"
        )

    def get_status(self, run_id: str) -> typing.Literal["ok", "critical"]:
        """Retrieve the status of this alert for a given run"""
        _url: URL = self.url / f"status/{run_id}"
        _response = sv_get(url=f"{_url}", headers=self._headers)
        _json_response = get_json_from_response(
            response=_response,
            expected_status=[http.HTTPStatus.OK],
            scenario=f"Retrieving status for alert '{self.id}' in run '{run_id}'",
        )

        return _json_response.get("status")
+ +""" + +import typing +import pydantic + +try: + from typing import Self +except ImportError: + from typing_extensions import Self +from simvue.api.objects.base import write_only +from .base import AlertBase, staging_check +from simvue.models import NAME_REGEX + + +class EventsAlert(AlertBase): + """Connect to an event-based alert either locally or on a server""" + + def __init__(self, identifier: str | None = None, **kwargs) -> None: + """Initialise a connection to an event alert by identifier""" + self.alert = EventAlertDefinition(self) + super().__init__(identifier, **kwargs) + + @classmethod + def get( + cls, count: int | None = None, offset: int | None = None + ) -> dict[str, typing.Any]: + """Retrieve only alerts of the event alert type""" + raise NotImplementedError("Retrieval of only event alerts is not yet supported") + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + description: str | None, + notification: typing.Literal["none", "email"], + pattern: str, + frequency: pydantic.PositiveInt, + enabled: bool = True, + offline: bool = False, + **_, + ) -> Self: + """Create a new event-based alert + + Note parameters are keyword arguments only. 
+ + Parameters + ---------- + name : str + name of the alert + description : str | None + description for this alert + notification : "none" | "email" + configure notifications sent by this alert + pattern : str + pattern to monitor in event logs + frequency : int + how often to check for updates + enabled : bool, optional + enable this alert upon creation, default is True + offline : bool, optional + create alert locally, default is False + + Returns + ------- + EventAlert + a new event alert with changes staged + + """ + + _alert_definition = {"pattern": pattern, "frequency": frequency} + _alert = EventsAlert( + name=name, + description=description, + notification=notification, + source="events", + alert=_alert_definition, + enabled=enabled, + _read_only=False, + _offline=offline, + ) + _alert._staging |= _alert_definition + return _alert + + +class EventAlertDefinition: + """Event alert definition sub-class""" + + def __init__(self, alert: EventsAlert) -> None: + """Initialise an alert definition with its parent alert""" + self._sv_obj = alert + + def compare(self, other: "EventAlertDefinition") -> bool: + """Compare this definition with that of another EventAlert""" + if not isinstance(other, EventAlertDefinition): + return False + + return all( + [ + self.frequency == other.frequency, + self.pattern == other.pattern, + ] + ) + + @property + def pattern(self) -> str: + """Retrieve the event log pattern monitored by this alert""" + try: + return self._sv_obj.get_alert()["pattern"] + except KeyError as e: + raise RuntimeError( + "Expected key 'pattern' in alert definition retrieval" + ) from e + + @property + @staging_check + def frequency(self) -> int: + """Retrieve the update frequency for this alert""" + try: + return self._sv_obj.get_alert()["frequency"] + except KeyError as e: + raise RuntimeError( + "Expected key 'frequency' in alert definition retrieval" + ) from e + + @frequency.setter + @write_only + @pydantic.validate_call + def frequency(self, 
"""
Simvue Alert Retrieval
======================

To simplify case whereby user does not know the alert type associated
with an identifier, use a generic alert object.
"""

import typing
import http

import pydantic

from simvue.api.objects.alert.user import UserAlert
from simvue.api.request import get_json_from_response
from simvue.api.request import get as sv_get
from .events import EventsAlert
from .metrics import MetricsThresholdAlert, MetricsRangeAlert
from .base import AlertBase

# Union of the concrete alert classes a lookup may resolve to
AlertType = EventsAlert | UserAlert | MetricsThresholdAlert | MetricsRangeAlert


class Alert:
    """Generic Simvue alert retrieval class"""

    @pydantic.validate_call()
    def __new__(cls, identifier: str, **kwargs) -> AlertType:
        """Retrieve an object representing an alert either locally or on the server by id"""
        _alert_pre = AlertBase(identifier=identifier, **kwargs)
        if (
            identifier is not None
            and identifier.startswith("offline_")
            and not _alert_pre._staging.get("source", None)
        ):
            raise RuntimeError(
                "Cannot determine Alert type - this is likely because you are attempting to reconnect "
                "to an offline alert which has already been sent to the server. To fix this, use the "
                "exact Alert type instead (eg MetricThresholdAlert, MetricRangeAlert etc)."
            )
        if _alert_pre.source == "events":
            return EventsAlert(identifier=identifier, **kwargs)
        elif _alert_pre.source == "metrics" and "threshold" in _alert_pre.get_alert():
            # Membership test (not truthiness) so a threshold of 0 is still
            # classified as a threshold alert
            return MetricsThresholdAlert(identifier=identifier, **kwargs)
        elif _alert_pre.source == "metrics":
            return MetricsRangeAlert(identifier=identifier, **kwargs)
        elif _alert_pre.source == "user":
            return UserAlert(identifier=identifier, **kwargs)

        raise RuntimeError(f"Unknown source type '{_alert_pre.source}'")

    @classmethod
    def get(
        cls,
        offline: bool = False,
        count: int | None = None,
        offset: int | None = None,
        **kwargs,
    ) -> typing.Generator[tuple[str, AlertType], None, None]:
        """Fetch all alerts from the server for the current user.

        Parameters
        ----------
        count : int, optional
            limit the number of results, default of None returns all.
        offset : int, optional
            start index for returned results, default of None starts at 0.

        Yields
        ------
        tuple[str, AlertType]
            identifier for an alert
            the alert itself as a class instance
        """
        if offline:
            return

        # Currently no alert filters
        kwargs.pop("filters", None)

        _class_instance = AlertBase(_local=True, _read_only=True)
        _url = f"{_class_instance._base_url}"

        _response = sv_get(
            _url,
            headers=_class_instance._headers,
            params={"start": offset, "count": count} | kwargs,
        )

        _label: str = _class_instance.__class__.__name__.lower()
        _label = _label.replace("base", "")
        _json_response = get_json_from_response(
            response=_response,
            expected_status=[http.HTTPStatus.OK],
            scenario=f"Retrieval of {_label}s",
        )

        if (_data := _json_response.get("data")) is None:
            raise RuntimeError(f"Expected key 'data' for retrieval of {_label}s")

        for _entry in _data:
            _id = _entry.pop("id")
            _source = _entry["source"]
            # Membership tests so threshold=0 / range_low=0 are classified
            # correctly instead of falling through to the error branch
            if _source == "events":
                yield (
                    _id,
                    EventsAlert(_read_only=True, identifier=_id, _local=True, **_entry),
                )
            elif _source == "user":
                yield (
                    _id,
                    UserAlert(_read_only=True, identifier=_id, _local=True, **_entry),
                )
            elif _source == "metrics" and "threshold" in _entry.get("alert", {}):
                yield (
                    _id,
                    MetricsThresholdAlert(
                        _local=True, _read_only=True, identifier=_id, **_entry
                    ),
                )
            elif _source == "metrics":
                yield (
                    _id,
                    MetricsRangeAlert(
                        _local=True, _read_only=True, identifier=_id, **_entry
                    ),
                )
            else:
                raise RuntimeError(f"Unrecognised alert source '{_entry['source']}'")
_entry["source"] == "user": + yield ( + _id, + UserAlert(_read_only=True, identifier=_id, _local=True, **_entry), + ) + elif _entry["source"] == "metrics" and _entry.get("alert", {}).get( + "threshold" + ): + yield ( + _id, + MetricsThresholdAlert( + _local=True, _read_only=True, identifier=_id, **_entry + ), + ) + elif _entry["source"] == "metrics" and _entry.get("alert", {}).get( + "range_low" + ): + yield ( + _id, + MetricsRangeAlert( + _local=True, _read_only=True, identifier=_id, **_entry + ), + ) + else: + raise RuntimeError(f"Unrecognised alert source '{_entry['source']}'") diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py new file mode 100644 index 00000000..2fb74f06 --- /dev/null +++ b/simvue/api/objects/alert/metrics.py @@ -0,0 +1,309 @@ +""" +Simvue Metric Alerts +==================== + +Classes for interacting with metric-based alerts either defined +locally or on a Simvue server + +""" + +import pydantic +import typing + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +from simvue.api.objects.base import write_only +from .base import AlertBase, staging_check +from simvue.models import NAME_REGEX + +Aggregate = typing.Literal["average", "sum", "at least one", "all"] +Rule = typing.Literal["is above", "is below", "is inside range", "is outside range"] + + +class MetricsThresholdAlert(AlertBase): + """Class for connecting to/creating a local or remotely defined metric threshold alert""" + + def __init__(self, identifier: str | None = None, **kwargs) -> None: + """Connect to a local or remote threshold alert by identifier""" + self.alert = MetricThresholdAlertDefinition(self) + super().__init__(identifier, **kwargs) + + @classmethod + def get( + cls, count: int | None = None, offset: int | None = None + ) -> dict[str, typing.Any]: + """Retrieve only MetricsThresholdAlerts""" + raise NotImplementedError("Retrieve of only metric alerts is not yet supported") + + @classmethod + 
@pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + metric: str, + description: str | None, + notification: typing.Literal["none", "email"], + aggregation: Aggregate, + rule: typing.Literal["is above", "is below"], + window: pydantic.PositiveInt, + threshold: float | int, + frequency: pydantic.PositiveInt, + enabled: bool = True, + offline: bool = False, + **_, + ) -> Self: + """Create a new metric threshold alert either locally or on the server + + Note all arguments are keyword arguments. + + Parameters + ---------- + name : str + name to assign to this alert + description : str | None + description for this alert + metric : str + the metric to monitor, or a globular expression to match multiple metrics + notification : "none" | "email" + the notification settings for this alert + aggregation : "average" | "sum" | "at least one" | "all" + how to aggregate metric values to deduce if alert is triggered + rule : "is above" | "is below" + threshold condition + window : int + window over which to calculate aggregation + threshold : float | int + the value defining the threshold + frequency : int + how often to monitor the metric + enabled : bool, optional + whether this alert is enabled upon creation, default is True + offline : bool, optional + whether to create the alert locally, default is False + + """ + _alert_definition = { + "rule": rule, + "frequency": frequency, + "window": window, + "metric": metric, + "aggregation": aggregation, + "threshold": threshold, + } + _alert = MetricsThresholdAlert( + name=name, + description=description, + notification=notification, + source="metrics", + alert=_alert_definition, + enabled=enabled, + _read_only=False, + _offline=offline, + ) + _alert._staging |= _alert_definition + return _alert + + +class MetricsRangeAlert(AlertBase): + """Class for connecting to/creating a local or remotely defined metric range alert""" + + def __init__(self, identifier: str | None = 
None, **kwargs) -> None: + """Connect to a local or remote threshold alert by identifier""" + self.alert = MetricRangeAlertDefinition(self) + super().__init__(identifier, **kwargs) + + def compare(self, other: "MetricsRangeAlert") -> bool: + """Compare two MetricRangeAlerts""" + return self.alert.compare(other) if super().compare(other) else False + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + metric: str, + description: str | None, + notification: typing.Literal["none", "email"], + aggregation: Aggregate, + rule: typing.Literal["is inside range", "is outside range"], + window: pydantic.PositiveInt, + range_high: float, + range_low: float, + frequency: pydantic.PositiveInt, + enabled: bool = True, + offline: bool = False, + **_, + ) -> Self: + """Create a new metric range alert either locally or on the server + + Note all arguments are keyword arguments. + + Parameters + ---------- + name : str + name to assign to this alert + metric : str + the metric to monitor + description : str | None + description for this alert + notification : "none" | "email" + the notification settings for this alert + aggregation : "average" | "sum" | "at least one" | "all" + how to aggregate metric values to deduce if alert is triggered + rule : "is inside range" | "is outside range" + threshold condition + window : int + window over which to calculate aggregation + range_high : float | int + the value defining the upper limit + range_low : float | int + the value defining the lower limit + frequency : int | None + how often to monitor the metric + enabled : bool, optional + whether this alert is enabled upon creation, default is True + offline : bool, optional + whether to create the alert locally, default is False + + """ + if range_low >= range_high: + raise ValueError(f"Invalid arguments for range [{range_low}, {range_high}]") + + _alert_definition = { + "rule": rule, + "frequency": frequency, + 
"window": window, + "metric": metric, + "aggregation": aggregation, + "range_low": range_low, + "range_high": range_high, + } + _alert = MetricsRangeAlert( + name=name, + description=description, + notification=notification, + source="metrics", + enabled=enabled, + alert=_alert_definition, + _read_only=False, + _offline=offline, + ) + _alert._staging |= _alert_definition + return _alert + + +class MetricsAlertDefinition: + """General alert definition for a metric alert""" + + def __init__(self, alert: MetricsRangeAlert) -> None: + """Initialise definition with target alert""" + self._sv_obj = alert + + def compare(self, other: "MetricsAlertDefinition") -> bool: + """Compare a MetricsAlertDefinition with another""" + return all( + [ + self.aggregation == other.aggregation, + self.frequency == other.frequency, + self.rule == other.rule, + self.window == other.window, + ] + ) + + @property + def aggregation(self) -> Aggregate: + """Retrieve the aggregation strategy for this alert""" + if not (_aggregation := self._sv_obj.get_alert().get("aggregation")): + raise RuntimeError( + "Expected key 'aggregation' in alert definition retrieval" + ) + return _aggregation + + @property + def rule(self) -> Rule: + """Retrieve the rule for this alert""" + if not (_rule := self._sv_obj.get_alert().get("rule")): + raise RuntimeError("Expected key 'rule' in alert definition retrieval") + return _rule + + @property + def window(self) -> int: + """Retrieve the aggregation window for this alert""" + if not (_window := self._sv_obj.get_alert().get("window")): + raise RuntimeError("Expected key 'window' in alert definition retrieval") + return _window + + @property + @staging_check + def frequency(self) -> int: + """Retrieve the monitor frequency for this alert""" + try: + return self._sv_obj.get_alert()["frequency"] + except KeyError as e: + raise RuntimeError( + "Expected key 'frequency' in alert definition retrieval" + ) from e + + @frequency.setter + @write_only + 
@pydantic.validate_call + def frequency(self, frequency: int) -> None: + """Set the monitor frequency for this alert""" + _alert = self._sv_obj.get_alert() | {"frequency": frequency} + self._sv_obj._staging["alert"] = _alert + + +class MetricThresholdAlertDefinition(MetricsAlertDefinition): + """Alert definition for metric threshold alerts""" + + def compare(self, other: "MetricThresholdAlertDefinition") -> bool: + """Compare this MetricThresholdAlertDefinition with another""" + if not isinstance(other, MetricThresholdAlertDefinition): + return False + + return all([super().compare(other), self.threshold == other.threshold]) + + @property + def threshold(self) -> float: + """Retrieve the threshold value for this alert""" + if not (threshold_l := self._sv_obj.get_alert().get("threshold")): + raise RuntimeError("Expected key 'threshold' in alert definition retrieval") + return threshold_l + + +class MetricRangeAlertDefinition(MetricsAlertDefinition): + """Alert definition for metric range alerts""" + + def compare(self, other: "MetricRangeAlertDefinition") -> bool: + """Compare a MetricRangeAlertDefinition with another""" + if not isinstance(other, MetricRangeAlertDefinition): + return False + + return all( + [ + super().compare(other), + self.range_high == other.range_high, + self.range_low == other.range_low, + ] + ) + + @property + def range_low(self) -> float: + """Retrieve the lower limit for metric range""" + if not (range_l := self._sv_obj.get_alert().get("range_low")): + raise RuntimeError("Expected key 'range_low' in alert definition retrieval") + return range_l + + @property + def range_high(self) -> float: + """Retrieve upper limit for metric range""" + if not (range_u := self._sv_obj.get_alert().get("range_high")): + raise RuntimeError( + "Expected key 'range_high' in alert definition retrieval" + ) + return range_u diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py new file mode 100644 index 00000000..9ddcd6e1 --- /dev/null 
+++ b/simvue/api/objects/alert/user.py @@ -0,0 +1,108 @@ +""" +Simvue User Alert +================= + +Class for connecting with a local/remote user defined alert. + +""" + +import pydantic +import typing + +try: + from typing import Self +except ImportError: + from typing_extensions import Self +import http + +from simvue.api.request import get_json_from_response, put as sv_put +from .base import AlertBase +from simvue.models import NAME_REGEX + + +class UserAlert(AlertBase): + """Connect to/create a user defined alert either locally or on server""" + + def __init__(self, identifier: str | None = None, **kwargs) -> None: + super().__init__(identifier, **kwargs) + self._local_status: dict[str, str | None] = kwargs.pop("status", {}) + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + description: str | None, + notification: typing.Literal["none", "email"], + enabled: bool = True, + offline: bool = False, + **_, + ) -> Self: + """Create a new user-defined alert + + Note all arguments are keyword arguments. 
+ + Parameters + ---------- + name : str + the name to assign to this alert + description : str | None + description for this alert + notification : "none" | "email" + configure notification settings for this alert + enabled : bool, optional + whether this alert is enabled upon creation, default is True + offline : bool, optional + whether this alert should be created locally, default is False + + """ + return UserAlert( + name=name, + description=description, + notification=notification, + source="user", + enabled=enabled, + _read_only=False, + _offline=offline, + ) + + @classmethod + def get( + cls, count: int | None = None, offset: int | None = None + ) -> dict[str, typing.Any]: + """Return only UserAlerts""" + raise NotImplementedError("Retrieve of only user alerts is not yet supported") + + def get_status(self, run_id: str) -> typing.Literal["ok", "critical"] | None: + """Retrieve current alert status for the given run""" + if self._offline: + return self._staging.get("status", self._local_status).get(run_id) + + return super().get_status(run_id) + + def on_reconnect(self, id_mapping: dict[str, str]) -> None: + """Set status update on reconnect""" + for offline_id, status in self._staging.get("status", {}).items(): + self.set_status(id_mapping.get(offline_id), status) + + @pydantic.validate_call + def set_status(self, run_id: str, status: typing.Literal["ok", "critical"]) -> None: + """Set the status of this alert for a given run""" + if self._offline: + if "status" not in self._staging: + self._staging["status"] = {} + self._staging["status"][run_id] = status + return + + _response = sv_put( + url=self.url / "status" / run_id, + data={"status": status}, + headers=self._headers, + ) + + get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Updating state of alert '{self._identifier}' to '{status}'", + ) diff --git a/simvue/api/objects/artifact/__init__.py b/simvue/api/objects/artifact/__init__.py new file mode 
100644 index 00000000..168fb3a8 --- /dev/null +++ b/simvue/api/objects/artifact/__init__.py @@ -0,0 +1,5 @@ +from .fetch import Artifact as Artifact +from .file import FileArtifact as FileArtifact +from .object import ObjectArtifact as ObjectArtifact + +__all__ = ["Artifact", "FileArtifact", "ObjectArtifact"] diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py new file mode 100644 index 00000000..dac009f5 --- /dev/null +++ b/simvue/api/objects/artifact/base.py @@ -0,0 +1,243 @@ +""" +Simvue Artifact +=============== + +Class for defining and interacting with artifact objects. + +""" + +import datetime +import http +import io +import typing +import pydantic + +try: + from typing import Self +except ImportError: + from typing_extensions import Self # noqa: F401 + +from simvue.api.url import URL +from simvue.exception import ObjectNotFoundError +from simvue.models import DATETIME_FORMAT +from simvue.api.objects.base import SimvueObject, staging_check, write_only +from simvue.api.objects.run import Run +from simvue.api.request import ( + put as sv_put, + get_json_from_response, + post as sv_post, + get as sv_get, +) + +Category = typing.Literal["code", "input", "output"] + +UPLOAD_TIMEOUT: int = 30 +DOWNLOAD_TIMEOUT: int = 30 +DOWNLOAD_CHUNK_SIZE: int = 8192 + + +class ArtifactBase(SimvueObject): + """Connect to/create an artifact locally or on the server""" + + def __init__( + self, identifier: str | None = None, _read_only: bool = True, **kwargs + ) -> None: + self._label = "artifact" + self._endpoint = f"{self._label}s" + super().__init__(identifier=identifier, _read_only=_read_only, **kwargs) + + # If the artifact is an online instance, need a place to store the response + # from the initial creation + self._init_data: dict[str, dict] = {} + + def commit(self) -> None: + self._logger.info("Cannot call method 'commit' on write-once type 'Artifact'") + + def attach_to_run(self, run_id: str, category: Category) -> None: + """Attach 
this artifact to a given run""" + self._init_data["runs"][run_id] = category + + if self._offline: + self._staging["runs"] = self._init_data["runs"] + super().commit() + return + + _run_artifacts_url = ( + URL(self._user_config.server.url) + / f"runs/{run_id}/artifacts/{self._init_data['id']}" + ) + + _response = sv_put( + url=f"{_run_artifacts_url}", + headers=self._headers, + json={"category": category}, + ) + + get_json_from_response( + expected_status=[http.HTTPStatus.OK], + scenario=f"adding artifact '{self.name}' to run '{run_id}'", + response=_response, + ) + + def on_reconnect(self, id_mapping: dict[str, str]) -> None: + _offline_staging = self._init_data["runs"].copy() + for id, category in _offline_staging.items(): + self.attach_to_run(run_id=id_mapping[id], category=category) + + def _upload(self, file: io.BytesIO) -> None: + if self._offline: + super().commit() + return + + if not (_url := self._staging.get("url")): + return + + _name = self._staging["name"] + + _response = sv_post( + url=_url, + headers={}, + is_json=False, + files={"file": file}, + data=self._init_data.get("fields"), + ) + + self._logger.debug( + "Got status code %d when uploading artifact", + _response.status_code, + ) + + get_json_from_response( + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], + allow_parse_failure=True, # JSON response from S3 not parsible + scenario=f"uploading artifact '{_name}' to object storage", + response=_response, + ) + + # Temporarily remove read-only state + self.read_only(False) + + # Update the server status to confirm file uploaded + self.uploaded = True + super().commit() + self.read_only(True) + + def _get( + self, storage: str | None = None, url: str | None = None, **kwargs + ) -> dict[str, typing.Any]: + return super()._get( + storage=storage or self._staging.get("server", {}).get("storage_id"), + url=url, + **kwargs, + ) + + @property + def checksum(self) -> str: + """Retrieve the checksum for this artifact""" + return 
self._get_attribute("checksum") + + @property + def storage_url(self) -> URL | None: + """Retrieve upload URL for artifact""" + return URL(_url) if (_url := self._init_data.get("url")) else None + + @property + def original_path(self) -> str: + """Retrieve the original path of the file associated with this artifact""" + return self._get_attribute("original_path") + + @property + def storage_id(self) -> str | None: + """Retrieve the storage identifier for this artifact""" + return self._get_attribute("storage_id") + + @property + def mime_type(self) -> str: + """Retrieve the MIME type for this artifact""" + return self._get_attribute("mime_type") + + @property + def size(self) -> int: + """Retrieve the size for this artifact in bytes""" + return self._get_attribute("size") + + @property + def name(self) -> str | None: + """Retrieve name for the artifact""" + return self._get_attribute("name") + + @property + def created(self) -> datetime.datetime | None: + """Retrieve created datetime for the artifact""" + _created: str | None = self._get_attribute("created") + return ( + datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None + ) + + @property + @staging_check + def uploaded(self) -> bool: + """Returns whether a file was uploaded for this artifact.""" + return self._get_attribute("uploaded") + + @uploaded.setter + @write_only + @pydantic.validate_call + def uploaded(self, is_uploaded: bool) -> None: + """Set if a file was successfully uploaded for this artifact.""" + self._staging["uploaded"] = is_uploaded + + @property + def download_url(self) -> URL | None: + """Retrieve the URL for downloading this artifact""" + return self._get_attribute("url") + + @property + def runs(self) -> typing.Generator[str, None, None]: + """Retrieve all runs for which this artifact is related""" + for _id, _ in Run.get(filters=[f"artifact.id == {self.id}"]): + yield _id + + def get_category(self, run_id: str) -> Category: + """Retrieve the category of this artifact 
with respect to a given run""" + _run_url = ( + URL(self._user_config.server.url) + / f"runs/{run_id}/artifacts/{self._identifier}" + ) + _response = sv_get(url=_run_url, header=self._headers) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], + scenario=f"Retrieval of category for artifact '{self._identifier}' with respect to run '{run_id}'", + ) + if _response.status_code == http.HTTPStatus.NOT_FOUND: + raise ObjectNotFoundError( + self._label, self._identifier, extra=f"for run '{run_id}'" + ) + + return _json_response["category"] + + @pydantic.validate_call + def download_content(self) -> typing.Generator[bytes, None, None]: + """Stream artifact content""" + if not self.download_url: + raise ValueError( + f"Could not retrieve URL for artifact '{self._identifier}'" + ) + _response = sv_get( + f"{self.download_url}", timeout=DOWNLOAD_TIMEOUT, headers=None + ) + + get_json_from_response( + response=_response, + allow_parse_failure=True, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of file for {self._label} '{self._identifier}'", + ) + + _total_length: str | None = _response.headers.get("content-length") + + if _total_length is None: + yield _response.content + else: + yield from _response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE) diff --git a/simvue/api/objects/artifact/fetch.py b/simvue/api/objects/artifact/fetch.py new file mode 100644 index 00000000..88f582ce --- /dev/null +++ b/simvue/api/objects/artifact/fetch.py @@ -0,0 +1,119 @@ +from simvue.api.objects.artifact.base import ArtifactBase +from .file import FileArtifact +from simvue.api.objects.artifact.object import ObjectArtifact +from simvue.api.request import get_json_from_response, get as sv_get +from simvue.api.url import URL +from simvue.exception import ObjectNotFoundError + +import http +import typing +import pydantic + +__all__ = ["Artifact"] + + +class Artifact: + """Generic Simvue artifact 
retrieval class""" + + def __new__(cls, identifier: str | None = None, **kwargs): + """Retrieve an object representing an Artifact by id""" + _artifact_pre = ArtifactBase(identifier=identifier, **kwargs) + if _artifact_pre.original_path: + return FileArtifact(identifier=identifier, **kwargs) + else: + return ObjectArtifact(identifier=identifier, **kwargs) + + @classmethod + def from_name( + cls, run_id: str, name: str, **kwargs + ) -> typing.Union[FileArtifact | ObjectArtifact, None]: + _temp = ArtifactBase(**kwargs) + _url = URL(_temp._user_config.server.url) / f"runs/{run_id}/artifacts" + _response = sv_get(url=f"{_url}", params={"name": name}, headers=_temp._headers) + _json_response = get_json_from_response( + expected_type=list, + response=_response, + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], + scenario=f"Retrieval of artifact '{name}' for run '{run_id}'", + ) + + if _response.status_code == http.HTTPStatus.NOT_FOUND or not _json_response: + raise ObjectNotFoundError(_temp._label, name, extra=f"for run '{run_id}'") + + if (_n_res := len(_json_response)) > 1: + raise RuntimeError( + f"Expected single result for artifact '{name}' for run '{run_id}'" + f" but got {_n_res}" + ) + + _first_result: dict[str, typing.Any] = _json_response[0] + _artifact_id: str = _first_result.pop("id") + + return Artifact( + identifier=_artifact_id, + run=run_id, + **_first_result, + _read_only=True, + _local=True, + ) + + @classmethod + @pydantic.validate_call + def get( + cls, + count: int | None = None, + offset: int | None = None, + **kwargs, + ) -> typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]: + """Returns artifacts associated with the current user. + + Parameters + ---------- + count : int, optional + limit the number of results, default of None returns all. + offset : int, optional + start index for returned results, default of None starts at 0. 
+ + Yields + ------ + tuple[str, FileArtifact | ObjectArtifact] + identifier for artifact + the artifact itself as a class instance + """ + + _class_instance = ArtifactBase(_local=True, _read_only=True) + _url = f"{_class_instance._base_url}" + _response = sv_get( + _url, + headers=_class_instance._headers, + params={"start": offset, "count": count} | kwargs, + ) + _label: str = _class_instance.__class__.__name__.lower() + _label = _label.replace("base", "") + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of {_label}s", + ) + + if (_data := _json_response.get("data")) is None: + raise RuntimeError(f"Expected key 'data' for retrieval of {_label}s") + + _out_dict: dict[str, FileArtifact | ObjectArtifact] = {} + + for _entry in _data: + _id = _entry.pop("id") + if _entry["original_path"]: + yield ( + _id, + FileArtifact( + _local=True, _read_only=True, identifier=_id, **_entry + ), + ) + else: + yield ( + _id, + ObjectArtifact( + _local=True, _read_only=True, identifier=_id, **_entry + ), + ) diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py new file mode 100644 index 00000000..7878d514 --- /dev/null +++ b/simvue/api/objects/artifact/file.py @@ -0,0 +1,92 @@ +from .base import ArtifactBase + +import typing +import pydantic +import os +import pathlib +from simvue.models import NAME_REGEX +from simvue.utilities import get_mimetype_for_file, get_mimetypes, calculate_sha256 + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + + +class FileArtifact(ArtifactBase): + @classmethod + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + storage: str | None, + file_path: pydantic.FilePath, + mime_type: str | None, + metadata: dict[str, typing.Any] | None, + offline: bool = False, + **kwargs, + ) -> Self: + """Create a new artifact either locally or on the server + + Note all arguments 
are keyword arguments + + Parameters + ---------- + name : str + the name for this artifact + storage : str | None + the identifier for the storage location for this object + category : "code" | "input" | "output" + the category of this artifact + file_path : pathlib.Path | str + path to the file this artifact represents + mime_type : str | None + the mime type for this file, else this is determined + metadata : dict[str, Any] | None + supply metadata information for this artifact + offline : bool, optional + whether to define this artifact locally, default is False + + """ + _mime_type = mime_type or get_mimetype_for_file(file_path) + + if _mime_type not in get_mimetypes(): + raise ValueError(f"Invalid MIME type '{mime_type}' specified") + file_path = pathlib.Path(file_path) + _file_size = file_path.stat().st_size + _file_orig_path = file_path.expanduser().absolute() + _file_checksum = calculate_sha256(f"{file_path}", is_file=True) + + kwargs.pop("original_path", None) + kwargs.pop("size", None) + kwargs.pop("checksum", None) + + _artifact = FileArtifact( + name=name, + storage=storage, + original_path=os.path.expandvars(_file_orig_path), + size=_file_size, + mime_type=_mime_type, + checksum=_file_checksum, + _offline=offline, + _read_only=False, + metadata=metadata, + **kwargs, + ) + _artifact._staging["file_path"] = str(file_path) + if offline: + _artifact._init_data = {} + + else: + _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._staging["url"] = _artifact._init_data["url"] + + _artifact._init_data["runs"] = kwargs.get("runs") or {} + + if offline: + return _artifact + + with open(file_path, "rb") as out_f: + _artifact._upload(file=out_f) + + return _artifact diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py new file mode 100644 index 00000000..daf8ec4f --- /dev/null +++ b/simvue/api/objects/artifact/object.py @@ -0,0 +1,114 @@ +from .base import ArtifactBase +from simvue.models import 
NAME_REGEX +from simvue.serialization import serialize_object +from simvue.utilities import calculate_sha256 + +import pydantic +import typing +import sys +import io + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + + +class ObjectArtifact(ArtifactBase): + def __init__( + self, identifier: str | None = None, _read_only: bool = True, **kwargs + ) -> None: + kwargs.pop("original_path", None) + super().__init__(identifier, _read_only, original_path="", **kwargs) + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + storage: str | None, + obj: typing.Any, + metadata: dict[str, typing.Any] | None, + allow_pickling: bool = True, + offline: bool = False, + **kwargs, + ) -> Self: + """Create a new artifact either locally or on the server + + Note all arguments are keyword arguments + + Parameters + ---------- + name : str + the name for this artifact + storage : str | None + the identifier for the storage location for this object + obj : Any + object to serialize and upload + metadata : dict[str, Any] | None + supply metadata information for this artifact + allow_pickling : bool, optional + whether to allow the object to be pickled if no other + serialization found. 
Default is True + offline : bool, optional + whether to define this artifact locally, default is False + + """ + # If the object has been saved as a bytes file, obj will be None + if obj is None: + try: + _data_type = kwargs.pop("mime_type") + _serialized = kwargs.pop("serialized") + _checksum = kwargs.pop("checksum") + kwargs.pop("size") + kwargs.pop("original_path") + except KeyError: + raise ValueError("Must provide an object to be saved, not None.") + + else: + _serialization = serialize_object(obj, allow_pickling) + + if not _serialization or not (_serialized := _serialization[0]): + raise ValueError(f"Could not serialize object of type '{type(obj)}'") + + if not (_data_type := _serialization[1]) and not allow_pickling: + raise ValueError( + f"Could not serialize object of type '{type(obj)}' without pickling" + ) + + _checksum = calculate_sha256(_serialized, is_file=False) + + _artifact = ObjectArtifact( + name=name, + storage=storage, + size=sys.getsizeof(_serialized), + mime_type=_data_type, + checksum=_checksum, + metadata=metadata, + _offline=offline, + _read_only=False, + **kwargs, + ) + + if offline: + _artifact._init_data = {} + _artifact._staging["obj"] = None + _artifact._local_staging_file.parent.mkdir(parents=True, exist_ok=True) + with open( + _artifact._local_staging_file.parent.joinpath(f"{_artifact.id}.object"), + "wb", + ) as file: + file.write(_serialized) + + else: + _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._staging["url"] = _artifact._init_data["url"] + + _artifact._init_data["runs"] = kwargs.get("runs") or {} + + if offline: + return _artifact + + _artifact._upload(file=io.BytesIO(_serialized)) + return _artifact diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py new file mode 100644 index 00000000..7f43904f --- /dev/null +++ b/simvue/api/objects/base.py @@ -0,0 +1,540 @@ +""" +Simvue RestAPI Objects +====================== + +Contains base class for interacting with objects on the Simvue 
server +""" + +import abc +import pathlib +import typing +import inspect +import uuid +import http +import json +import logging + +import msgpack +import pydantic + +from simvue.config.user import SimvueConfiguration +from simvue.exception import ObjectNotFoundError +from simvue.version import __version__ +from simvue.api.request import ( + get as sv_get, + post as sv_post, + put as sv_put, + delete as sv_delete, + get_json_from_response, +) +from simvue.api.url import URL + +logging.basicConfig(level=logging.INFO) + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + + +def staging_check(member_func: typing.Callable) -> typing.Callable: + """Decorator for checking if requested attribute has uncommitted changes""" + + def _wrapper(self) -> typing.Any: + if isinstance(self, SimvueObject): + _sv_obj = self + elif hasattr(self, "_sv_obj"): + _sv_obj = self._sv_obj + else: + raise RuntimeError( + f"Cannot use 'staging_check' decorator on type '{type(self).__name__}'" + ) + if not _sv_obj._read_only and member_func.__name__ in _sv_obj._staging: + _sv_obj._logger.warning( + f"Uncommitted change found for attribute '{member_func.__name__}'" + ) + return member_func(self) + + _wrapper.__name__ = member_func.__name__ + return _wrapper + + +def write_only(attribute_func: typing.Callable) -> typing.Callable: + def _wrapper(self: "SimvueObject", *args, **kwargs) -> typing.Any: + _sv_obj = getattr(self, "_sv_obj", self) + if _sv_obj._read_only: + raise AssertionError( + f"Cannot set property '{attribute_func.__name__}' " + f"on read-only object of type '{self._label}'" + ) + return attribute_func(self, *args, **kwargs) + + _wrapper.__name__ = attribute_func.__name__ + return _wrapper + + +class Visibility: + """Interface for object visibility definition""" + + def __init__(self, sv_obj: "SimvueObject") -> None: + """Initialise visibility with target object""" + self._sv_obj = sv_obj + + def _update_visibility(self, key: str, value: 
typing.Any) -> None: + """Update the visibility configuration for this object""" + _visibility = self._sv_obj._get_visibility() | {key: value} + self._sv_obj._staging["visibility"] = _visibility + + @property + @staging_check + def users(self) -> list[str]: + """Retrieve the list of users able to see this object""" + return self._sv_obj._get_visibility().get("users", []) + + @users.setter + @write_only + def users(self, users: list[str]) -> None: + """Set the list of users able to see this object""" + self._update_visibility("users", users) + + @property + @staging_check + def public(self) -> bool: + """Retrieve if this object is publically visible""" + return self._sv_obj._get_visibility().get("public", False) # type: ignore + + @public.setter + @write_only + def public(self, public: bool) -> None: + """Set if this object is publically visible""" + self._update_visibility("public", public) + + @property + @staging_check + def tenant(self) -> bool: + """Retrieve the tenant group this object is visible to""" + return self._sv_obj._get_visibility().get("tenant", False) # type: ignore + + @tenant.setter + @write_only + def tenant(self, tenant: bool) -> None: + """Set the tenant group this object is visible to""" + self._update_visibility("tenant", tenant) + + +class SimvueObject(abc.ABC): + def __init__( + self, + identifier: str | None = None, + _read_only: bool = True, + _local: bool = False, + _user_agent: str | None = None, + _offline: bool = False, + **kwargs, + ) -> None: + self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") + self._label: str = getattr(self, "_label", self.__class__.__name__.lower()) + self._read_only: bool = _read_only + self._endpoint: str = getattr(self, "_endpoint", f"{self._label}s") + self._identifier: str | None = ( + identifier if identifier is not None else f"offline_{uuid.uuid1()}" + ) + self._properties = [ + name + for name, member in inspect.getmembers(self.__class__) + if isinstance(member, property) + ] + 
self._offline: bool = _offline or ( + identifier is not None and identifier.startswith("offline_") + ) + + _config_args = { + "server_url": kwargs.pop("server_url", None), + "server_token": kwargs.pop("server_token", None), + "mode": "offline" if self._offline else "online", + } + + self._user_config = SimvueConfiguration.fetch(**_config_args) + + # Use a single file for each object so we can have parallelism + # e.g. multiple runs writing at the same time + self._local_staging_file: pathlib.Path = ( + self._user_config.offline.cache.joinpath( + self._endpoint, f"{self._identifier}.json" + ) + ) + + self._headers: dict[str, str] = { + "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}", + "User-Agent": _user_agent or f"Simvue Python client {__version__}", + } + + self._staging: dict[str, typing.Any] = {} + + # If this object is read-only, but not a local construction, make an API call + if ( + not self._identifier.startswith("offline_") + and self._read_only + and not _local + ): + self._staging = self._get() + + # Recover any locally staged changes if not read-only + self._staging |= ( + {} if (_read_only and not self._offline) else self._get_local_staged() + ) + + self._staging |= kwargs + + def _get_local_staged(self, obj_label: str | None = None) -> dict[str, typing.Any]: + """Retrieve any locally staged data for this identifier""" + if not self._local_staging_file.exists() or not self._identifier: + return {} + + with self._local_staging_file.open() as in_f: + _staged_data = json.load(in_f) + + return _staged_data + + def _stage_to_other(self, obj_label: str, key: str, value: typing.Any) -> None: + """Stage a change to another object type""" + with self._local_staging_file.open() as in_f: + _staged_data = json.load(in_f) + + if key not in _staged_data[obj_label]: + _staged_data[key] = value + return + + if isinstance(_staged_data[key], list): + if not _staged_data.get(key): + _staged_data[key] = [] + _staged_data[key] += value + elif 
isinstance(_staged_data[key], dict): + if not _staged_data.get(key): + _staged_data[key] = {} + _staged_data[key] |= value + else: + _staged_data[key] = value + + with self._local_staging_file.open("w") as out_f: + json.dump(_staged_data, out_f, indent=2) + + def _get_attribute( + self, attribute: str, *default, url: str | None = None + ) -> typing.Any: + # In the case where the object is read-only, staging is the data + # already retrieved from the server + _attribute_is_property: bool = attribute in self._properties + _state_is_read_only: bool = getattr(self, "_read_only", True) + _offline_state: bool = ( + self._identifier is not None and self._identifier.startswith("offline_") + ) + + if (_attribute_is_property and _state_is_read_only) or _offline_state: + try: + return self._staging[attribute] + except KeyError as e: + # If the key is not in staging, but the object is not in offline mode + # retrieve from the server and update cache instead + if not _offline_state and ( + _attribute := self._get(url=url).get(attribute) + ): + self._staging[attribute] = _attribute + return _attribute + raise AttributeError( + f"Could not retrieve attribute '{attribute}' " + f"for {self._label} '{self._identifier}' from cached data" + ) from e + + try: + self._logger.debug( + f"Retrieving attribute '{attribute}' from {self._label} '{self._identifier}'" + ) + return self._get(url=url)[attribute] + except KeyError as e: + if default: + return default[0] + + if self._offline: + raise AttributeError( + f"A value for attribute '{attribute}' has " + f"not yet been committed for offline {self._label} '{self._identifier}'" + ) from e + raise RuntimeError( + f"Expected key '{attribute}' for {self._label} '{self._identifier}'" + ) from e + + def _clear_staging(self) -> None: + self._staging = {} + + if not self._local_staging_file.exists(): + return + + with self._local_staging_file.open() as in_f: + _staged_data = json.load(in_f) + + if _staged_data.get(self._label): + 
_staged_data[self._label].pop(self._identifier, None) + + with self._local_staging_file.open("w") as out_f: + json.dump(_staged_data, out_f, indent=2) + + def _get_visibility(self) -> dict[str, bool | list[str]]: + try: + return self._get_attribute("visibility") + except AttributeError: + return {} + + @classmethod + def new(cls, **_) -> Self: + pass + + @classmethod + def ids( + cls, count: int | None = None, offset: int | None = None, **kwargs + ) -> list[str]: + """Retrieve a list of all object identifiers""" + _class_instance = cls(_read_only=True, _local=True) + if (_data := cls._get_all_objects(count, offset, **kwargs).get("data")) is None: + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + return [_entry["id"] for _entry in _data] + + @classmethod + @pydantic.validate_call + def get( + cls, + count: pydantic.PositiveInt | None = None, + offset: pydantic.NonNegativeInt | None = None, + **kwargs, + ) -> typing.Generator[tuple[str, Self | None], None, None]: + _class_instance = cls(_read_only=True, _local=True) + if (_data := cls._get_all_objects(count, offset, **kwargs).get("data")) is None: + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + for _entry in _data: + if not (_id := _entry.pop("id", None)): + raise RuntimeError( + f"Expected key 'id' for {_class_instance.__class__.__name__.lower()}" + ) + yield _id, cls(_read_only=True, identifier=_id, _local=True, **_entry) + + @classmethod + def count(cls, **kwargs) -> int: + _class_instance = cls(_read_only=True) + if ( + _count := cls._get_all_objects(count=None, offset=None, **kwargs).get( + "count" + ) + ) is None: + raise RuntimeError( + f"Expected key 'count' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + return _count + + @classmethod + def _get_all_objects( + cls, count: int | None, offset: int | None, **kwargs + ) -> dict[str, typing.Any]: + 
    def commit(self) -> None:
        """Send all staged changes for this object to the Simvue server.

        In offline mode the staged changes are written to the local staging
        file instead. A brand-new object (or one created offline) is POSTed
        to create it server-side, whereas an existing object with staged
        changes is updated via PUT. Staged changes are cleared afterwards.

        Raises
        ------
        AttributeError
            if the object is in read-only mode
        """
        if self._read_only:
            raise AttributeError("Cannot commit object in 'read-only' mode")

        if self._offline:
            self._logger.debug(
                f"Writing updates to staging file for {self._label} '{self.id}': {self._staging}"
            )
            self._cache()
            return

        # Initial commit is creation of object
        # if staging is empty then we do not need to use PUT
        if not self._identifier or self._identifier.startswith("offline_"):
            self._logger.debug(
                f"Posting from staged data for {self._label} '{self.id}': {self._staging}"
            )
            self._post(**self._staging)
        elif self._staging:
            self._logger.debug(
                f"Pushing updates from staged data for {self._label} '{self.id}': {self._staging}"
            )
            self._put(**self._staging)

        # Clear staged changes
        self._clear_staging()
typing.Any]: + if not is_json: + kwargs = msgpack.packb(kwargs, use_bin_type=True) + _response = sv_post( + url=f"{self._base_url}", + headers=self._headers | {"Content-Type": "application/msgpack"}, + data=kwargs, + is_json=is_json, + ) + + if _response.status_code == http.HTTPStatus.FORBIDDEN: + raise RuntimeError( + f"Forbidden: You do not have permission to create object of type '{self._label}'" + ) + + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Creation of {self._label}", + ) + + if isinstance(_json_response, list): + raise RuntimeError( + "Expected dictionary from JSON response but got type list" + ) + + if _id := _json_response.get("id"): + self._logger.debug("'%s' created successfully", _id) + self._identifier = _id + + return _json_response + + def _put(self, **kwargs) -> dict[str, typing.Any]: + if not self.url: + raise RuntimeError(f"Identifier for instance of {self._label} Unknown") + _response = sv_put( + url=f"{self.url}", headers=self._headers, data=kwargs, is_json=True + ) + + if _response.status_code == http.HTTPStatus.FORBIDDEN: + raise RuntimeError( + f"Forbidden: You do not have permission to create object of type '{self._label}'" + ) + + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Creation of {self._label} '{self._identifier}", + ) + + def delete( + self, _linked_objects: list[str] | None = None, **kwargs + ) -> dict[str, typing.Any]: + if self._get_local_staged(): + self._local_staging_file.unlink(missing_ok=True) + + if self._offline: + return {"id": self._identifier} + + if not self.url: + raise RuntimeError(f"Identifier for instance of {self._label} Unknown") + _response = sv_delete(url=f"{self.url}", headers=self._headers, params=kwargs) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NO_CONTENT], + scenario=f"Deletion of {self._label} 
'{self._identifier}'", + ) + self._logger.debug("'%s' deleted successfully", self._identifier) + + return _json_response + + def _get( + self, url: str | None = None, allow_parse_failure: bool = False, **kwargs + ) -> dict[str, typing.Any]: + if self._identifier.startswith("offline_"): + return self._get_local_staged() + + if not self.url: + raise RuntimeError(f"Identifier for instance of {self._label} Unknown") + + _response = sv_get( + url=f"{url or self.url}", headers=self._headers, params=kwargs + ) + + if _response.status_code == http.HTTPStatus.NOT_FOUND: + raise ObjectNotFoundError( + obj_type=self._label, name=self._identifier or "Unknown" + ) + + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + allow_parse_failure=allow_parse_failure, + scenario=f"Retrieval of {self._label} '{self._identifier}'", + ) + self._logger.debug("'%s' retrieved successfully", self._identifier) + + if not isinstance(_json_response, dict): + raise RuntimeError( + f"Expected dictionary from JSON response during {self._label} retrieval " + f"but got '{type(_json_response)}'" + ) + return _json_response + + def refresh(self) -> None: + if self._read_only: + self._staging = self._get() + + def _cache(self) -> None: + if not (_dir := self._local_staging_file.parent).exists(): + _dir.mkdir(parents=True) + + _local_data: dict[str, typing.Any] = {"obj_type": self.__class__.__name__} + + if self._local_staging_file.exists(): + with self._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + _local_data |= self._staging + with self._local_staging_file.open("w", encoding="utf-8") as out_f: + json.dump(_local_data, out_f, indent=2) + + def to_dict(self) -> dict[str, typing.Any]: + return self._get() | self._staging + + def on_reconnect(self, id_mapping: dict[str, str]) -> None: + pass + + @property + def staged(self) -> dict[str, typing.Any] | None: + """Return currently staged changes to this object""" + return 
self._staging or None diff --git a/simvue/api/objects/events.py b/simvue/api/objects/events.py new file mode 100644 index 00000000..b330501b --- /dev/null +++ b/simvue/api/objects/events.py @@ -0,0 +1,118 @@ +""" +Simvue Server Events +==================== + +Contains a class for remotely connecting to Simvue events, or defining +a new set of events given relevant arguments. + +""" + +import http +import typing +import datetime + +import pydantic + +from simvue.api.url import URL + +from .base import SimvueObject +from simvue.models import DATETIME_FORMAT, EventSet +from simvue.api.request import get as sv_get, get_json_from_response + +__all__ = ["Events"] + + +class Events(SimvueObject): + def __init__( + self, + _read_only: bool = True, + _local: bool = False, + **kwargs, + ) -> None: + self._label = "event" + super().__init__(_read_only=_read_only, _local=_local, **kwargs) + self._run_id = self._staging.get("run") + + @classmethod + @pydantic.validate_call + def get( + cls, + run_id: str, + *, + count: pydantic.PositiveInt | None = None, + offset: pydantic.PositiveInt | None = None, + **kwargs, + ) -> typing.Generator[EventSet, None, None]: + _class_instance = cls(_read_only=True, _local=True) + if ( + _data := cls._get_all_objects(count, offset, run=run_id, **kwargs).get( + "data" + ) + ) is None: + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + for _entry in _data: + yield EventSet(**_entry) + + @classmethod + @pydantic.validate_call + def new(cls, *, run: str, offline: bool = False, events: list[EventSet], **kwargs): + """Create a new Events entry on the Simvue server""" + return Events( + run=run, + events=[event.model_dump() for event in events], + _read_only=False, + _offline=offline, + **kwargs, + ) + + def _post(self, **kwargs) -> dict[str, typing.Any]: + return super()._post(is_json=False, **kwargs) + + def _put(self, **kwargs) -> dict[str, typing.Any]: + raise 
NotImplementedError("Method 'put' is not available for type Events") + + @pydantic.validate_call + def histogram( + self, + timestamp_begin: datetime.datetime, + timestamp_end: datetime.datetime, + window: int, + filters: list[str] | None, + ) -> list[dict[str, str | int]]: + if timestamp_end - timestamp_begin <= datetime.timedelta(seconds=window): + raise ValueError( + "Invalid arguments for datetime range, " + "value difference must be greater than window" + ) + _url: URL = self._base_url / "histogram" + _time_begin: str = timestamp_begin.strftime(DATETIME_FORMAT) + _time_end: str = timestamp_end.strftime(DATETIME_FORMAT) + _response = sv_get( + url=_url, + headers=self._headers, + params={ + "run": self._run_id, + "window": window, + "timestamp_begin": timestamp_begin, + "timestamp_end": timestamp_end, + } + | ({"filters": filters} if filters else {}), + ) + _json_response = get_json_from_response( + expected_status=[http.HTTPStatus.OK], + scenario="Retrieval of events histogram", + response=_response, + ) + return _json_response.get("data") + + def delete( + self, _linked_objects: list[str] | None = None, **kwargs + ) -> dict[str, typing.Any]: + raise NotImplementedError("Cannot delete event set") + + def on_reconnect(self, id_mapping: dict[str, str]): + if online_run_id := id_mapping.get(self._staging["run"]): + self._staging["run"] = online_run_id diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py new file mode 100644 index 00000000..b0313e42 --- /dev/null +++ b/simvue/api/objects/folder.py @@ -0,0 +1,176 @@ +""" +Simvue Server Folder +==================== + +Contains a class for remotely connecting to a Simvue folder, or defining +a new folder given relevant arguments. 
import pathlib
import typing
import datetime
# FIX: json was previously imported via codecarbon internals
# (`from codecarbon.output_methods.emissions_data import json`), creating an
# accidental third-party dependency for a stdlib module
import json

import pydantic

from simvue.exception import ObjectNotFoundError

from .base import SimvueObject, staging_check, write_only
from simvue.models import FOLDER_REGEX, DATETIME_FORMAT
description""" + return self._get().get("description") + + @description.setter + @write_only + @pydantic.validate_call + def description(self, description: str) -> None: + """Update the folder description""" + self._staging["description"] = description + + @property + @staging_check + def name(self) -> str | None: + """Return the folder name""" + return self._get().get("name") + + @name.setter + @write_only + @pydantic.validate_call + def name(self, name: str) -> None: + """Update the folder name""" + self._staging["name"] = name + + @property + @staging_check + def metadata(self) -> dict[str, int | str | None | float | dict] | None: + """Return the folder metadata""" + return self._get().get("metadata") + + @metadata.setter + @write_only + @pydantic.validate_call + def metadata(self, metadata: dict[str, int | str | None | float | dict]) -> None: + """Update the folder metadata""" + self._staging["metadata"] = metadata + + @property + @staging_check + def star(self) -> bool: + """Return if this folder is starred""" + return self._get().get("starred", False) + + @star.setter + @write_only + @pydantic.validate_call + def star(self, is_true: bool = True) -> None: + """Star this folder as a favourite""" + self._staging["starred"] = is_true + + @property + @staging_check + def ttl(self) -> int: + """Return the retention period for this folder""" + return self._get_attribute("ttl") + + @ttl.setter + @write_only + @pydantic.validate_call + def ttl(self, time_seconds: int) -> None: + """Update the retention period for this folder""" + self._staging["ttl"] = time_seconds + + def delete( # should params to this be optional and default to False? 
@pydantic.validate_call
def get_folder_from_path(
    path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)],
) -> Folder:
    """Retrieve the folder matching the given path from the server.

    Raises
    ------
    ObjectNotFoundError
        if no folder exists at this path
    """
    _matches = Folder.get(filters=json.dumps([f"path == {path}"]), count=1)

    try:
        _, _folder = next(_matches)
    except StopIteration as e:
        raise ObjectNotFoundError(obj_type="folder", name=path) from e
    return _folder  # type: ignore
+ +""" + +import http +import typing +import json + +import pydantic + + +from .base import SimvueObject +from simvue.models import MetricSet +from simvue.api.request import get as sv_get, get_json_from_response + +__all__ = ["Metrics"] + + +class Metrics(SimvueObject): + def __init__( + self, + _read_only: bool = True, + _local: bool = False, + **kwargs, + ) -> None: + self._label = "metric" + super().__init__(_read_only=_read_only, _local=_local, **kwargs) + self._run_id = self._staging.get("run") + + @classmethod + @pydantic.validate_call + def new( + cls, *, run: str, offline: bool = False, metrics: list[MetricSet], **kwargs + ): + """Create a new Events entry on the Simvue server""" + return Metrics( + run=run, + metrics=[metric.model_dump() for metric in metrics], + _read_only=False, + _offline=offline, + ) + + @classmethod + @pydantic.validate_call + def get( + cls, + metrics: list[str], + xaxis: typing.Literal["timestamp", "step", "time"], + *, + count: pydantic.PositiveInt | None = None, + offset: pydantic.PositiveInt | None = None, + **kwargs, + ) -> typing.Generator[MetricSet, None, None]: + _class_instance = cls(_read_only=True, _local=True) + if ( + _data := cls._get_all_objects( + count, + offset, + metrics=json.dumps(metrics), + xaxis=xaxis, + **kwargs, + ).get("data") + ) is None: + raise RuntimeError( + f"Expected key 'data' for retrieval of {_class_instance.__class__.__name__.lower()}s" + ) + + for _entry in _data: + yield MetricSet(**_entry) + + @pydantic.validate_call + def span(self, run_ids: list[str]) -> dict[str, int | float]: + """Returns the metrics span for the given runs""" + _url = self._base_url / "span" + _response = sv_get(url=f"{_url}", headers=self._headers, json=run_ids) + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario="Retrieving metric spans", + ) + + @pydantic.validate_call + def names(self, run_ids: list[str]) -> list[str]: + """Returns the metric names for the given 
runs""" + _url = self._base_url / "names" + _response = sv_get( + url=f"{_url}", headers=self._headers, params={"runs": json.dumps(run_ids)} + ) + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario="Retrieving metric names", + expected_type=list, + ) + + def _post(self, **kwargs) -> dict[str, typing.Any]: + return super()._post(is_json=False, **kwargs) + + def delete( + self, _linked_objects: list[str] | None = None, **kwargs + ) -> dict[str, typing.Any]: + raise NotImplementedError("Cannot delete metric set") + + def on_reconnect(self, id_mapping: dict[str, str]): + if online_run_id := id_mapping.get(self._staging["run"]): + self._staging["run"] = online_run_id + + def to_dict(self) -> dict[str, typing.Any]: + return self._staging diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py new file mode 100644 index 00000000..9841ded5 --- /dev/null +++ b/simvue/api/objects/run.py @@ -0,0 +1,381 @@ +""" +Simvue Runs +=========== + +Contains a class for remotely connecting to Simvue runs, or defining +a new run given relevant arguments. + +""" + +import http +import typing +import pydantic +import datetime + +try: + from typing import Self +except ImportError: + from typing_extensions import Self + +from .base import SimvueObject, staging_check, Visibility, write_only +from simvue.api.request import ( + get as sv_get, + put as sv_put, + get_json_from_response, +) +from simvue.api.url import URL +from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT + +Status = typing.Literal[ + "lost", "failed", "completed", "terminated", "running", "created" +] + +__all__ = ["Run"] + + +class Run(SimvueObject): + """Class for interacting with/creating runs on the server.""" + + def __init__(self, identifier: str | None = None, **kwargs) -> None: + """Initialise a Run + + If an identifier is provided a connection will be made to the + object matching the identifier on the target server. 
+ Else a new Run will be created using arguments provided in kwargs. + + Parameters + ---------- + identifier : str, optional + the remote server unique id for the target run + **kwargs : dict + any additional arguments to be passed to the object initialiser + """ + self.visibility = Visibility(self) + super().__init__(identifier, **kwargs) + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], + system: dict[str, typing.Any] | None = None, + status: typing.Literal[ + "terminated", "created", "failed", "completed", "lost", "running" + ] = "created", + offline: bool = False, + **kwargs, + ) -> Self: + """Create a new Run on the Simvue server. + + Parameters + ---------- + folder : str + folder to contain this run + offline : bool, optional + create the run in offline mode, default False. + + Returns + ------- + Run + run object with staged changes + """ + return Run( + folder=folder, + system=system, + status=status, + _read_only=False, + _offline=offline, + **kwargs, + ) + + @property + @staging_check + def name(self) -> str: + """Retrieve name associated with this run""" + return self._get_attribute("name") + + def delete(self, **kwargs) -> dict[str, typing.Any]: + # Any metric entries need to also be removed + return super().delete(_linked_objects=["metrics", "events"], **kwargs) + + @name.setter + @write_only + @pydantic.validate_call + def name( + self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] + ) -> None: + """Set the name for this run.""" + self._staging["name"] = name + + @property + @staging_check + def tags(self) -> list[str]: + """Retrieve the tags associated with this run.""" + return self._get_attribute("tags") + + @tags.setter + @write_only + @pydantic.validate_call + def tags(self, tags: list[str]) -> None: + """Set the tags for this run.""" + self._staging["tags"] = tags + + @property + @staging_check + def status(self) -> Status: + """Get the run 
status.""" + return self._get_attribute("status") + + @status.setter + @write_only + @pydantic.validate_call + def status(self, status: Status) -> None: + """Set the run status.""" + self._staging["status"] = status + + @property + @staging_check + def ttl(self) -> int: + """Return the retention period for this run""" + return self._get_attribute("ttl") + + @ttl.setter + @write_only + @pydantic.validate_call + def ttl(self, time_seconds: pydantic.NonNegativeInt | None) -> None: + """Update the retention period for this run""" + self._staging["ttl"] = time_seconds + + @property + @staging_check + def folder(self) -> str: + """Get the folder associated with this run.""" + return self._get_attribute("folder") + + @folder.setter + @write_only + @pydantic.validate_call + def folder( + self, folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] + ) -> None: + """Set the folder for this run.""" + self._staging["folder"] = folder + + @property + @staging_check + def metadata(self) -> dict[str, typing.Any]: + """Get the metadata for this run.""" + return self._get_attribute("metadata") + + @metadata.setter + @write_only + @pydantic.validate_call + def metadata(self, metadata: dict[str, typing.Any]) -> None: + """Set the metadata for this run.""" + self._staging["metadata"] = metadata + + @property + @staging_check + def description(self) -> str: + """Get the description for this run.""" + return self._get_attribute("description") + + @description.setter + @write_only + @pydantic.validate_call + def description(self, description: str | None) -> None: + """Set the description for this run.""" + self._staging["description"] = description + + @property + def system(self) -> dict[str, typing.Any]: + """Get the system metadata for this run.""" + return self._get_attribute("system") + + @system.setter + @write_only + @pydantic.validate_call + def system(self, system: dict[str, typing.Any]) -> None: + """Set the system metadata for this run.""" + 
    @property
    @staging_check
    def alerts(self) -> typing.Generator[str, None, None]:
        """Yield the identifiers of alerts attached to this run."""
        for alert in self.get_alert_details():
            yield alert["id"]

    def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, None]:
        """Retrieve the full details of alerts for this run"""
        for alert in self._get_attribute("alerts"):
            # Server returns a list of wrapper entries; the alert record
            # itself sits under the 'alert' key of each entry
            yield alert["alert"]

    @alerts.setter
    @write_only
    @pydantic.validate_call
    def alerts(self, alerts: list[str]) -> None:
        """Stage alert identifiers to attach to this run.

        NOTE(review): this keeps only identifiers not already staged and
        REPLACES the staged list rather than extending it, so previously
        staged alerts are discarded - confirm this is intended.
        """
        self._staging["alerts"] = [
            alert for alert in alerts if alert not in self._staging.get("alerts", [])
        ]
self._staging["started"] = started.strftime(DATETIME_FORMAT) + + @property + @staging_check + def endtime(self) -> datetime.datetime | None: + """Retrieve endtime datetime for the run""" + _endtime: str | None = self._get_attribute("endtime") + return ( + datetime.datetime.strptime(_endtime, DATETIME_FORMAT) if _endtime else None + ) + + @endtime.setter + @write_only + @pydantic.validate_call + def endtime(self, endtime: datetime.datetime) -> None: + self._staging["endtime"] = endtime.strftime(DATETIME_FORMAT) + + @property + def metrics( + self, + ) -> typing.Generator[tuple[str, dict[str, int | float | bool]], None, None]: + yield from self._get_attribute("metrics").items() + + @property + def events( + self, + ) -> typing.Generator[tuple[str, dict[str, int | float | bool]], None, None]: + yield from self._get_attribute("events").items() + + @write_only + def send_heartbeat(self) -> dict[str, typing.Any] | None: + if self._offline or not self._identifier: + return None + + _url = self._base_url + _url /= f"{self._identifier}/heartbeat" + _response = sv_put(f"{_url}", headers=self._headers, data={}) + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario="Retrieving heartbeat state", + ) + + @property + def _abort_url(self) -> URL | None: + return self.url / "abort" if self.url else None + + @property + def _artifact_url(self) -> URL | None: + if not self._identifier or not self.url: + return None + _url = self.url + _url /= "artifacts" + return _url + + @property + def abort_trigger(self) -> bool: + if self._offline or not self._identifier: + return False + + _response = sv_get(f"{self._abort_url}", headers=self._headers) + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieving abort status for run '{self.id}'", + ) + return _json_response.get("status", False) + + @property + def artifacts(self) -> list[dict[str, typing.Any]]: + """Retrieve 
the artifacts for this run""" + if self._offline or not self._artifact_url: + return [] + + _response = sv_get(url=self._artifact_url, headers=self._headers) + + return get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieving artifacts for run '{self.id}'", + expected_type=list, + ) + + @pydantic.validate_call + def abort(self, reason: str) -> dict[str, typing.Any]: + if not self._abort_url: + raise RuntimeError("Cannot abort run, no endpoint defined") + + _response = sv_put( + f"{self._abort_url}", headers=self._headers, data={"reason": reason} + ) + + return get_json_from_response( + expected_status=[http.HTTPStatus.OK], + scenario=f"Abort of run '{self.id}'", + response=_response, + ) + + def on_reconnect(self, id_mapping: dict[str, str]): + online_alert_ids: list[str] = [] + for id in self._staging.get("alerts", []): + try: + online_alert_ids.append(id_mapping[id]) + except KeyError: + raise KeyError( + "Could not find alert ID in offline to online ID mapping." + ) + # If run is offline, no alerts have been added yet, so add all alerts: + if self._identifier is not None and self._identifier.startswith("offline"): + self._staging["alerts"] = online_alert_ids + # Otherwise, only add alerts which have not yet been added + else: + self._staging["alerts"] = [ + id for id in online_alert_ids if id not in list(self.alerts) + ] diff --git a/simvue/api/objects/stats.py b/simvue/api/objects/stats.py new file mode 100644 index 00000000..85819a95 --- /dev/null +++ b/simvue/api/objects/stats.py @@ -0,0 +1,95 @@ +""" +Simvue Stats +============ + +Statistics accessible to the current user. 
class Stats(SimvueObject):
    """Class for accessing Server statistics.

    Stats is a singleton-style endpoint: there is exactly one statistics
    record per server, so no identifier applies.
    """

    def __init__(self) -> None:
        # Attach the run-statistics sub-interface before the base class
        # initialiser runs so it is always present on the instance
        self.runs = RunStatistics(self)
        self._label = "stat"
        super().__init__()

        # Stats is a singular object (i.e. identifier is not applicable)
        # set it to empty string so not None
        self._identifier = ""

    @classmethod
    def new(cls, **kwargs) -> None:
        """Creation of multiple stats objects is not logical here"""
        raise AttributeError("Creation of statistics objects is not supported")

    def whoami(self) -> dict[str, str]:
        """Return the current user"""
        _url: URL = URL(self._user_config.server.url) / "whoami"
        _response = sv_get(url=f"{_url}", headers=self._headers)
        return get_json_from_response(
            response=_response,
            expected_status=[http.HTTPStatus.OK],
            scenario="Retrieving current user",
        )

    def _get_run_stats(self) -> dict[str, int]:
        """Retrieve the run statistics"""
        return self._get_attribute("runs")

    def _get_local_staged(self) -> dict[str, typing.Any]:
        """No staging for stats so returns empty dict"""
        return {}

    def _get_visibility(self) -> dict[str, bool | list[str]]:
        """Visibility does not apply here"""
        return {}

    def to_dict(self) -> dict[str, typing.Any]:
        """Returns dictionary form of statistics"""
        return {"runs": self._get_run_stats()}
@property
def running(self) -> int:
    """Number of running runs"""
    if (_running := self._sv_obj._get_run_stats().get("running")) is None:
        raise RuntimeError("Expected key 'running' in run statistics retrieval")
    return _running

@property
def completed(self) -> int:
    """Number of completed runs"""
    # FIX: previously read the 'running' key due to a copy-paste error
    if (_completed := self._sv_obj._get_run_stats().get("completed")) is None:
        raise RuntimeError("Expected key 'completed' in run statistics retrieval")
    return _completed

@property
def data(self) -> int:
    """Data count"""
    # FIX: previously read the 'running' key due to a copy-paste error
    if (_data := self._sv_obj._get_run_stats().get("data")) is None:
        raise RuntimeError("Expected key 'data' in run statistics retrieval")
    return _data
+ + """ + + def __init__( + self, + identifier: str | None = None, + _read_only: bool = False, + **kwargs, + ) -> None: + """Retrieve an alert from the Simvue server by identifier""" + self._label = "storage" + self._endpoint = self._label + super().__init__(identifier, _read_only=_read_only, **kwargs) + + @classmethod + def new(cls, **_): + """Create a new instance of a storage type""" + pass + + @property + @staging_check + def name(self) -> str: + """Retrieve the name for this storage""" + return self._get_attribute("name") + + @name.setter + @write_only + @pydantic.validate_call + def name( + self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] + ) -> None: + """Set name assigned to this folder""" + self._staging["name"] = name + + @property + def backend(self) -> str: + """Retrieve the backend of storage""" + return self._get_attribute("backend") + + @property + @staging_check + def is_default(self) -> bool: + """Retrieve if this is the default storage for the user""" + return self._get_attribute("is_default") + + @is_default.setter + @write_only + @pydantic.validate_call + def is_default(self, is_default: bool) -> None: + """Set this storage to be the default""" + self._staging["is_default"] = is_default + + @property + @staging_check + def is_tenant_useable(self) -> bool: + """Retrieve if this is usable by the current user tenant""" + return self._get_attribute("is_tenant_useable") + + @is_tenant_useable.setter + @write_only + @pydantic.validate_call + def is_tenant_useable(self, is_tenant_useable: bool) -> None: + """Set this storage to be usable by the current user tenant""" + self._staging["is_tenant_useable"] = is_tenant_useable + + @property + @staging_check + def is_enabled(self) -> bool: + """Retrieve if this is enabled""" + return self._get_attribute("is_enabled") + + @is_enabled.setter + @write_only + @pydantic.validate_call + def is_enabled(self, is_enabled: bool) -> None: + """Set this storage to be usable by the current user 
tenant""" + self._staging["is_enabled"] = is_enabled + + @property + def created(self) -> datetime.datetime | None: + """Retrieve created datetime for the artifact""" + _created: str | None = self._get_attribute("created") + return ( + datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None + ) diff --git a/simvue/api/objects/storage/fetch.py b/simvue/api/objects/storage/fetch.py new file mode 100644 index 00000000..ce72e784 --- /dev/null +++ b/simvue/api/objects/storage/fetch.py @@ -0,0 +1,91 @@ +""" +Simvue Storage Retrieval +==============--======== + +To simplify case whereby user does not know the storage type associated +with an identifier, use a generic storage object. +""" + +import typing +import http +import pydantic + +from simvue.api.request import get_json_from_response +from simvue.api.request import get as sv_get + +from .s3 import S3Storage +from .file import FileStorage +from .base import StorageBase + + +class Storage: + """Generic Simvue storage retrieval class""" + + def __new__(cls, identifier: str | None = None, **kwargs): + """Retrieve an object representing an storage either locally or on the server by id""" + _storage_pre = StorageBase(identifier=identifier, **kwargs) + if _storage_pre.backend == "S3": + return S3Storage(identifier=identifier, **kwargs) + elif _storage_pre.backend == "File": + return FileStorage(identifier=identifier, **kwargs) + + raise RuntimeError(f"Unknown backend '{_storage_pre.backend}'") + + @classmethod + @pydantic.validate_call + def get( + cls, count: int | None = None, offset: int | None = None, **kwargs + ) -> typing.Generator[tuple[str, FileStorage | S3Storage], None, None]: + """Returns storage systems accessible to the current user. + + Parameters + ---------- + count : int, optional + limit the number of results, default of None returns all. + offset : int, optional + start index for returned results, default of None starts at 0. 
+ + Yields + ------ + tuple[str, FileStorage | S3Storage] + identifier for a storage + the storage itself as a class instance + """ + + # Currently no storage filters + kwargs.pop("filters", None) + + _class_instance = StorageBase(_local=True, _read_only=True) + _url = f"{_class_instance._base_url}" + _response = sv_get( + _url, + headers=_class_instance._headers, + params={"start": offset, "count": count} | kwargs, + ) + _label: str = _class_instance.__class__.__name__.lower() + _label = _label.replace("base", "") + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK], + scenario=f"Retrieval of {_label}s", + expected_type=list, + ) + + _out_dict: dict[str, FileStorage | S3Storage] = {} + + for _entry in _json_response: + _id = _entry.pop("id") + if _entry["backend"] == "S3": + yield ( + _id, + S3Storage(_local=True, _read_only=True, identifier=_id, **_entry), + ) + elif _entry["backend"] == "File": + yield ( + _id, + FileStorage(_local=True, _read_only=True, identifier=_id, **_entry), + ) + else: + raise RuntimeError( + f"Unrecognised storage backend '{_entry['backend']}'" + ) diff --git a/simvue/api/objects/storage/file.py b/simvue/api/objects/storage/file.py new file mode 100644 index 00000000..821b9099 --- /dev/null +++ b/simvue/api/objects/storage/file.py @@ -0,0 +1,68 @@ +""" +Simvue File Storage +=================== + +Class for interacting with a file based storage on the server. 
+ +""" + +import typing + +try: + from typing import Self +except ImportError: + from typing_extensions import Self +import pydantic + +from .base import StorageBase +from simvue.models import NAME_REGEX + + +class FileStorage(StorageBase): + """Class for defining/accessing a File based storage system on the server.""" + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + disable_check: bool, + is_tenant_useable: bool, + is_enabled: bool, + is_default: bool, + offline: bool = False, + **_, + ) -> Self: + """Create a new file storage object. + + Parameters + ---------- + name : str + name to allocated to this storage system + disable_check : bool + whether to disable checks for this system + tenant_usable : bool + whether this system is usable by the current tenant + is_enabled : bool + whether to enable this system + default : bool + if this storage system should become the new default + offline : bool, optional + if this instance should be created in offline mode, default False + + Returns + ------- + FileStorage + instance of storage system with staged changes + """ + return FileStorage( + name=name, + backend="File", + disable_check=disable_check, + is_tenant_useable=is_tenant_useable, + is_default=is_default, + is_enabled=is_enabled, + _read_only=False, + _offline=offline, + ) diff --git a/simvue/api/objects/storage/s3.py b/simvue/api/objects/storage/s3.py new file mode 100644 index 00000000..9d804b44 --- /dev/null +++ b/simvue/api/objects/storage/s3.py @@ -0,0 +1,179 @@ +""" +Simvue S3 Storage +================= + +Class for interacting with an S3 based storage on the server. 
+ +""" + +import typing + +try: + from typing import Self +except ImportError: + from typing_extensions import Self +import pydantic + +from simvue.api.objects.base import write_only + +from .base import StorageBase, staging_check +from simvue.models import NAME_REGEX + + +class S3Storage(StorageBase): + """Class for defining/accessing an S3 based storage system on the server.""" + + def __init__(self, identifier: str | None = None, **kwargs) -> None: + """Initialise an S3Storage instance attaching a configuration""" + self.config = Config(self) + super().__init__(identifier, **kwargs) + + @classmethod + @pydantic.validate_call + def new( + cls, + *, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + disable_check: bool, + endpoint_url: pydantic.HttpUrl, + region_name: str, + access_key_id: str, + secret_access_key: pydantic.SecretStr, + bucket: str, + is_tenant_useable: bool, + is_default: bool, + is_enabled: bool, + offline: bool = False, + **__, + ) -> Self: + """Create a new S3 storage object. 
+ + Parameters + ---------- + name : str + name to allocated to this storage system + disable_check : bool + whether to disable checks for this system + endpoint_url : str + endpoint defining the S3 upload URL + region_name : str + the region name associated with this storage system + access_key_id : str + the access key identifier for the storage + secret_access_key : str + the secret access key, stored as a secret string + bucket : str + the bucket associated with this storage system + tenant_usable : bool + whether this system is usable by the current tenant + is_enabled : bool + whether to enable this system + is_default : bool + if this storage system should become the new is_default + offline : bool, optional + if this instance should be created in offline mode, is_default False + + Returns + ------- + S3Storage + instance of storage system with staged changes + + """ + _config: dict[str, str] = { + "endpoint_url": endpoint_url.__str__(), + "region_name": region_name, + "access_key_id": access_key_id, + "secret_access_key": secret_access_key.get_secret_value(), + "bucket": bucket, + } + _storage = S3Storage( + name=name, + backend="S3", + config=_config, + disable_check=disable_check, + is_tenant_useable=is_tenant_useable, + is_default=is_default, + is_enabled=is_enabled, + _read_only=False, + _offline=offline, + ) + _storage._staging |= _config + return _storage + + @staging_check + def get_config(self) -> dict[str, typing.Any]: + """Retrieve configuration""" + try: + return self._get_attribute("config") + except AttributeError: + return {} + + +class Config: + """S3 Configuration interface""" + + def __init__(self, storage: S3Storage) -> None: + """Initialise a new configuration using an S3Storage object""" + self._sv_obj = storage + + @property + @staging_check + def endpoint_url(self) -> str: + """Retrieve the endpoint URL for this storage""" + try: + return self._sv_obj.get_config()["endpoint_url"] + except KeyError as e: + raise RuntimeError( + 
"Expected key 'endpoint_url' in alert definition retrieval" + ) from e + + @endpoint_url.setter + @write_only + @pydantic.validate_call + def endpoint_url(self, endpoint_url: pydantic.HttpUrl) -> None: + """Modify the endpoint URL for this storage""" + _config = self._sv_obj.get_config() | {"endpoint_url": endpoint_url.__str__()} + self._sv_obj._staging["config"] = _config + + @property + @staging_check + def region_name(self) -> str: + """Retrieve the region name for this storage""" + try: + return self._sv_obj.get_config()["region_name"] + except KeyError as e: + raise RuntimeError( + "Expected key 'region_name' in alert definition retrieval" + ) from e + + @region_name.setter + @write_only + @pydantic.validate_call + def region_name(self, region_name: str) -> None: + """Modify the region name for this storage""" + _config = self._sv_obj.get_config() | {"region_name": region_name} + self._sv_obj._staging["config"] = _config + + @property + @staging_check + def bucket(self) -> str: + """Retrieve the bucket label for this storage""" + try: + return self._sv_obj.get_config()["bucket"] + except KeyError as e: + raise RuntimeError( + "Expected key 'bucket' in alert definition retrieval" + ) from e + + @bucket.setter + @write_only + @pydantic.validate_call + def bucket(self, bucket: str) -> None: + """Modify the bucket label for this storage""" + if self._sv_obj.type == "file": + raise ValueError( + f"Cannot set attribute 'bucket' for storage type '{self._sv_obj.type}'" + ) + + _config = self._sv_obj.get_config() | {"bucket": bucket} + self._sv_obj._staging["config"] = _config diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py new file mode 100644 index 00000000..f2ca969a --- /dev/null +++ b/simvue/api/objects/tag.py @@ -0,0 +1,111 @@ +""" +Simvue Server Tag +================= + +Contains a class for remotely connecting to a Simvue Tag, or defining +a new tag given relevant arguments. 
+ +""" + +import pydantic.color +import typing +import datetime +from .base import SimvueObject, staging_check, write_only +from simvue.models import DATETIME_FORMAT + +__all__ = ["Tag"] + + +class Tag(SimvueObject): + """Class for creation/interaction with tag object on server""" + + @classmethod + @pydantic.validate_call + def new(cls, *, name: str, offline: bool = False, **kwargs): + """Create a new Tag on the Simvue server. + + Parameters + ---------- + name : str + name for the tag + offline : bool, optional + create this tag in offline mode, default False. + + Returns + ------- + Tag + tag object with staged attributes + """ + _data: dict[str, typing.Any] = {"name": name} + return Tag(name=name, _read_only=False, _offline=offline, **kwargs) + + @property + @staging_check + def name(self) -> str: + """Retrieve the tag name""" + return self._get_attribute("name") + + @name.setter + @write_only + @pydantic.validate_call + def name(self, name: str) -> None: + """Set the tag name""" + self._staging["name"] = name + + @property + @staging_check + def colour(self) -> pydantic.color.RGBA: + """Retrieve the tag colour""" + return pydantic.color.parse_str(self._get_attribute("colour")) + + @colour.setter + @write_only + @pydantic.validate_call + def colour(self, colour: pydantic.color.Color) -> None: + """Set the tag colour""" + self._staging["colour"] = colour.as_hex() + + @property + @staging_check + def description(self) -> str: + """Get description for this tag""" + return self._get_attribute("description") + + @description.setter + @write_only + @pydantic.validate_call + def description(self, description: str) -> None: + """Set the description for this tag""" + self._staging["description"] = description + + @property + def created(self) -> datetime.datetime | None: + """Retrieve created datetime for the run""" + _created: str | None = self._get_attribute("created") + return ( + datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None + ) + + 
@classmethod + def get( + cls, *, count: int | None = None, offset: int | None = None, **kwargs + ) -> typing.Generator[tuple[str, "SimvueObject"], None, None]: + """Get tags from the server. + + Parameters + ---------- + count : int, optional + limit the number of objects returned, default no limit. + offset : int, optional + start index for results, default is 0. + + Yields + ------ + tuple[str, Tag] + id of tag + Tag object representing object on server + """ + # There are currently no tag filters + kwargs.pop("filters", None) + + return super().get(count=count, offset=offset, **kwargs) diff --git a/simvue/api.py b/simvue/api/request.py similarity index 52% rename from simvue/api.py rename to simvue/api/request.py index 8f294aae..3ebdb86b 100644 --- a/simvue/api.py +++ b/simvue/api/request.py @@ -8,10 +8,11 @@ """ import copy -import json +import json as json_module import typing import http +from codecarbon.external.logger import logging import requests from tenacity import ( retry, @@ -19,7 +20,7 @@ stop_after_attempt, wait_exponential, ) -from .utilities import parse_validation_response +from simvue.utilities import parse_validation_response DEFAULT_API_TIMEOUT = 10 RETRY_MULTIPLIER = 1 @@ -61,7 +62,11 @@ def is_retryable_exception(exception: Exception) -> bool: reraise=True, ) def post( - url: str, headers: dict[str, str], data: typing.Any, is_json: bool = True + url: str, + headers: dict[str, str], + data: typing.Any, + is_json: bool = True, + files: dict[str, typing.Any] | None = None, ) -> requests.Response: """HTTP POST with retries @@ -83,38 +88,22 @@ def post( """ if is_json: - data_sent: typing.Union[str, dict[str, typing.Any]] = json.dumps(data) + data_sent: str | dict[str, typing.Any] = json_module.dumps(data) headers = set_json_header(headers) else: data_sent = data + logging.debug(f"POST: {url}\n\tdata={data_sent}") response = requests.post( - url, headers=headers, data=data_sent, timeout=DEFAULT_API_TIMEOUT + url, headers=headers, data=data_sent, 
timeout=DEFAULT_API_TIMEOUT, files=files ) - if response.status_code in ( - http.HTTPStatus.UNAUTHORIZED, - http.HTTPStatus.FORBIDDEN, - ): - raise RuntimeError( - f"Authorization error [{response.status_code}]: {response.text}" - ) - if response.status_code == http.HTTPStatus.UNPROCESSABLE_ENTITY: _parsed_response = parse_validation_response(response.json()) raise ValueError( f"Validation error for '{url}' [{response.status_code}]:\n{_parsed_response}" ) - if response.status_code not in ( - http.HTTPStatus.OK, - http.HTTPStatus.CREATED, - http.HTTPStatus.CONFLICT, - ): - raise RuntimeError( - f"HTTP error for '{url}' [{response.status_code}]: {response.text}" - ) - return response @@ -127,7 +116,8 @@ def post( def put( url: str, headers: dict[str, str], - data: dict[str, typing.Any], + data: dict[str, typing.Any] | None = None, + json: dict[str, typing.Any] | None = None, is_json: bool = True, timeout: int = DEFAULT_API_TIMEOUT, ) -> requests.Response: @@ -141,6 +131,8 @@ def put( headers for the post request data : dict[str, typing.Any] data to put + json : dict | None + json data to send is_json : bool, optional send as JSON string, by default True timeout : int, optional @@ -151,17 +143,17 @@ def put( requests.Response response from executing PUT """ - if is_json: - data_sent: typing.Union[str, dict[str, typing.Any]] = json.dumps(data) + if is_json and data: + data_sent: str | dict[str, typing.Any] = json_module.dumps(data) headers = set_json_header(headers) else: data_sent = data - response = requests.put(url, headers=headers, data=data_sent, timeout=timeout) - - response.raise_for_status() + logging.debug(f"PUT: {url}\n\tdata={data_sent}\n\tjson={json}") - return response + return requests.put( + url, headers=headers, data=data_sent, timeout=timeout, json=json + ) @retry( @@ -171,7 +163,11 @@ def put( reraise=True, ) def get( - url: str, headers: dict[str, str], timeout: int = DEFAULT_API_TIMEOUT + url: str, + headers: dict[str, str] | None = None, + params: 
dict[str, str | int | float | None] | None = None, + timeout: int = DEFAULT_API_TIMEOUT, + json: dict[str, typing.Any] | None = None, ) -> requests.Response: """HTTP GET @@ -183,13 +179,89 @@ def get( headers for the post request timeout : int, optional timeout of request, by default DEFAULT_API_TIMEOUT + json : dict[str, Any] | None, optional + any json to send in request Returns ------- requests.Response response from executing GET """ - response = requests.get(url, headers=headers, timeout=timeout) - response.raise_for_status() + logging.debug(f"GET: {url}\n\tparams={params}") + return requests.get(url, headers=headers, timeout=timeout, params=params, json=json) - return response + +@retry( + wait=wait_exponential(multiplier=RETRY_MULTIPLIER, min=RETRY_MIN, max=RETRY_MAX), + retry=retry_if_exception(is_retryable_exception), + stop=stop_after_attempt(RETRY_STOP), + reraise=True, +) +def delete( + url: str, + headers: dict[str, str], + timeout: int = DEFAULT_API_TIMEOUT, + params: dict[str, typing.Any] | None = None, +) -> requests.Response: + """HTTP DELETE + + Parameters + ---------- + url : str + URL to put to + headers : dict[str, str] + headers for the post request + timeout : int, optional + timeout of request, by default DEFAULT_API_TIMEOUT + params : dict, optional + parameters for deletion + + Returns + ------- + requests.Response + response from executing DELETE + """ + logging.debug(f"DELETE: {url}\n\tparams={params}") + return requests.delete(url, headers=headers, timeout=timeout, params=params) + + +def get_json_from_response( + expected_status: list[int], + scenario: str, + response: requests.Response, + allow_parse_failure: bool = False, + expected_type: typing.Type[dict | list] = dict, +) -> dict | list: + try: + json_response = response.json() + json_response = json_response or ({} if expected_type is dict else []) + decode_error = "" + except json_module.JSONDecodeError as e: + json_response = {} if allow_parse_failure else None + decode_error = 
f"{e}" + + error_str = f"{scenario} failed for url '{response.url}'" + details: str | None = None + + if (_status_code := response.status_code) in expected_status: + if not isinstance(json_response, expected_type): + details = f"expected type '{expected_type.__name__}' but got '{type(json_response).__name__}'" + elif json_response is not None: + return json_response + else: + details = f"could not request JSON response: {decode_error}" + elif isinstance(json_response, dict): + error_str += f" with status {_status_code}" + details = (json_response or {}).get("detail") + + try: + txt_response = response.text + except UnicodeDecodeError: + txt_response = None + + if details: + error_str += f": {details}" + elif txt_response: + error_str += f": {txt_response}" + + raise RuntimeError(error_str) diff --git a/simvue/api/url.py b/simvue/api/url.py new file mode 100644 index 00000000..a22480a4 --- /dev/null +++ b/simvue/api/url.py @@ -0,0 +1,82 @@ +""" +URL Library +=========== + +Module contains classes for easier handling of URLs. 
+ +""" + +try: + from typing import Self +except ImportError: + from typing_extensions import Self +import urllib.parse +import copy + +import pydantic + + +class URL: + """URL class for ease of construction and use of server endpoints.""" + + @pydantic.validate_call + def __init__(self, url: str) -> None: + """Initialise a url from string form""" + url = url[:-1] if url.endswith("/") else url + + _url = urllib.parse.urlparse(url) + self._scheme: str = _url.scheme + self._path: str = _url.path + self._host: str | None = _url.hostname + self._port: int | None = _url.port + self._fragment: str = _url.fragment + + def __truediv__(self, other: str) -> Self: + """Define URL extension through use of '/'""" + _new = copy.deepcopy(self) + _new /= other + return _new + + @pydantic.validate_call + def __itruediv__(self, other: str) -> Self: + """Define URL extension through use of '/'""" + other = other[1:] if other.startswith("/") else other + other = other[:-1] if other.endswith("/") else other + + self._path = f"{self._path}/{other}" if other else self._path + return self + + @property + def scheme(self) -> str: + return self._scheme + + @property + def path(self) -> str: + return self._path + + @property + def hostname(self) -> str | None: + return self._host + + @property + def fragment(self) -> str: + return self._fragment + + @property + def port(self) -> int | None: + return self._port + + def __str__(self) -> str: + """Construct string form of the URL""" + _out_str: str = "" + if self.scheme: + _out_str += f"{self.scheme}://" + if self.hostname: + _out_str += self.hostname + if self.port: + _out_str += f":{self.port}" + if self.path: + _out_str += self.path + if self.fragment: + _out_str += self.fragment + return _out_str diff --git a/simvue/bin/sender.py b/simvue/bin/sender.py index 67bc1078..649b9d6f 100644 --- a/simvue/bin/sender.py +++ b/simvue/bin/sender.py @@ -1,35 +1,44 @@ """Send runs to server""" -import getpass import logging -import os -import sys -import 
tempfile -from simvue.sender import sender -from simvue.utilities import create_file, remove_file +from simvue.sender import sender, UPLOAD_ORDER +import argparse -logger = logging.getLogger() -logger.setLevel(logging.DEBUG) - -handler = logging.StreamHandler(sys.stdout) -handler.setLevel(logging.INFO) -formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") -handler.setFormatter(formatter) -logger.addHandler(handler) +_logger = logging.getLogger(__name__) +_logger.setLevel(logging.INFO) def run() -> None: - lockfile = os.path.join(tempfile.gettempdir(), f"simvue-{getpass.getuser()}.lock") - - if os.path.isfile(lockfile): - logger.error("Cannot initiate run, locked by other process.") - sys.exit(1) - - create_file(lockfile) + parser = argparse.ArgumentParser(description="My script description") + parser.add_argument( + "-w", + "--max-workers", + type=int, + required=False, + default=5, + help="The maximum number of worker threads to use in parallel, by default 5", + ) + parser.add_argument( + "-n", + "--threading-threshold", + type=int, + required=False, + default=10, + help="The number of objects of a given type above which items will be sent to the server in parallel, by default 10", + ) + parser.add_argument( + "-o", + "--objects-to-upload", + type=str, + nargs="+", + required=False, + default=UPLOAD_ORDER, + help="The object types to upload, by default All", + ) + args = parser.parse_args() try: - sender() + _logger.info("Starting Simvue Sender") + sender(**vars(args)) except Exception as err: - logger.critical("Exception running sender: %s", str(err)) - - remove_file(lockfile) + _logger.critical("Exception running sender: %s", str(err)) diff --git a/simvue/client.py b/simvue/client.py index 5936f9cb..45f152c5 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -6,9 +6,11 @@ server including deletion and retrieval. 
""" +import contextlib import json import logging import os +import pathlib import typing import http import pydantic @@ -17,6 +19,9 @@ import requests +from simvue.api.objects.alert.base import AlertBase +from simvue.exception import ObjectNotFoundError + from .converters import ( aggregated_metrics_to_dataframe, to_dataframe, @@ -27,63 +32,28 @@ from .utilities import check_extra, prettify_pydantic from .models import FOLDER_REGEX, NAME_REGEX from .config.user import SimvueConfiguration +from .api.request import get_json_from_response +from .api.objects import Run, Folder, Tag, Artifact, Alert -if typing.TYPE_CHECKING: - pass CONCURRENT_DOWNLOADS = 10 DOWNLOAD_CHUNK_SIZE = 8192 -DOWNLOAD_TIMEOUT = 30 logger = logging.getLogger(__file__) -def downloader(job: dict[str, str]) -> bool: - """Download a job output to the location specified within the definition - - Parameters - ---------- - job : dict[str, str] - a dictionary containing information on URL and path for a given job - this information is then used to perform the download - - Returns - ------- - bool - whether the file was created successfully - """ - # Check to make sure all requirements have been retrieved first - for key in ("url", "path", "filename"): - if key not in job: - logger.warning(f"Expected key '{key}' during job object retrieval") - raise RuntimeError( - "Failed to retrieve required information during job download" - ) - +def _download_artifact_to_file( + artifact: Artifact, output_dir: pathlib.Path | None +) -> None: try: - response = requests.get(job["url"], stream=True, timeout=DOWNLOAD_TIMEOUT) - response = requests.get(job["url"], stream=True, timeout=DOWNLOAD_TIMEOUT) - except requests.exceptions.RequestException: - return False - - total_length = response.headers.get("content-length") - total_length = response.headers.get("content-length") - - save_location: str = os.path.join(job["path"], job["filename"]) - - if not os.path.isdir(job["path"]): - raise ValueError(f"Cannot write to 
'{job['path']}', not a directory.") + _file_name = os.path.basename(artifact.name) + except AttributeError: + _file_name = os.path.basename(artifact) + _output_file = (output_dir or pathlib.Path.cwd()).joinpath(_file_name) - logger.debug(f"Writing file '{save_location}'") - - with open(save_location, "wb") as fh: - if total_length is None: - fh.write(response.content) - else: - for data in response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE): - fh.write(data) - - return os.path.exists(save_location) + with _output_file.open("wb") as out_f: + for content in artifact.download_content(): + out_f.write(content) class Client: @@ -94,8 +64,8 @@ class Client: def __init__( self, *, - server_token: typing.Optional[str] = None, - server_url: typing.Optional[str] = None, + server_token: pydantic.SecretStr | None = None, + server_url: str | None = None, ) -> None: """Initialise an instance of the Simvue client @@ -118,43 +88,9 @@ def __init__( logger.warning(f"No {label} specified") self._headers: dict[str, str] = { - "Authorization": f"Bearer {self._user_config.server.token}" + "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}" } - def _get_json_from_response( - self, - expected_status: list[int], - scenario: str, - response: requests.Response, - ) -> typing.Union[dict, list]: - try: - json_response = response.json() - json_response = json_response or {} - except json.JSONDecodeError: - json_response = None - - error_str = f"{scenario} failed " - - if (_status_code := response.status_code) in expected_status: - if json_response is not None: - return json_response - details = "could not request JSON response" - else: - error_str += f"with status {_status_code}" - details = (json_response or {}).get("details") - - try: - txt_response = response.text - except UnicodeDecodeError: - txt_response = None - - if details: - error_str += f": {details}" - elif txt_response: - error_str += f": {txt_response}" - - raise RuntimeError(error_str) - 
@prettify_pydantic @pydantic.validate_call def get_run_id_from_name( @@ -181,42 +117,26 @@ def get_run_id_from_name( if either information could not be retrieved from the server, or multiple/no runs are found """ - params: dict[str, str] = {"filters": json.dumps([f"name == {name}"])} - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/runs", - headers=self._headers, - params=params, - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario="Retrieval of run ID from name", - response=response, - ) + _runs = Run.get(filters=json.dumps([f"name == {name}"])) - if not isinstance(json_response, dict): + try: + _id, _ = next(_runs) + except StopIteration as e: raise RuntimeError( - "Expected dictionary as response for ID " - f"retrieval but got {type(json_response)}" - ) + "Could not collect ID - no run found with this name." + ) from e - if not (response_data := json_response.get("data")): - raise RuntimeError(f"No ID found for run '{name}'") - - if len(response_data) == 0: - raise RuntimeError("Could not collect ID - no run found with this name.") - if len(response_data) > 1: + with contextlib.suppress(StopIteration): + next(_runs) raise RuntimeError( "Could not collect ID - more than one run exists with this name." 
) - if not (first_id := response_data[0].get("id")): - raise RuntimeError("Failed to retrieve identifier for run.") - return first_id + + return _id @prettify_pydantic @pydantic.validate_call - def get_run(self, run_id: str) -> typing.Optional[dict[str, typing.Any]]: + def get_run(self, run_id: str) -> Run | None: """Retrieve a single run Parameters @@ -234,26 +154,7 @@ def get_run(self, run_id: str) -> typing.Optional[dict[str, typing.Any]]: RuntimeError if retrieval of information from the server on this run failed """ - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/runs/{run_id}", headers=self._headers - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of run '{run_id}'", - response=response, - ) - - if response.status_code == http.HTTPStatus.NOT_FOUND: - return None - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during run retrieval " - f"but got '{type(json_response)}'" - ) - return json_response + return Run(identifier=run_id, read_only=True) @prettify_pydantic @pydantic.validate_call @@ -270,35 +171,23 @@ def get_run_name_from_id(self, run_id: str) -> str: str the registered name for the run """ - if not run_id: - raise ValueError("Expected value for run_id but got None") - - _run_data = self.get_run(run_id) - - if not _run_data: - raise RuntimeError(f"Failed to retrieve data for run '{run_id}'") - - if not (_name := _run_data.get("name")): - raise RuntimeError("Expected key 'name' in server response") - return _name + return Run(identifier=run_id).name @prettify_pydantic @pydantic.validate_call def get_runs( self, - filters: typing.Optional[list[str]], + filters: list[str] | None, *, system: bool = False, metrics: bool = False, alerts: bool = False, metadata: bool = False, - output_format: typing.Literal["dict", "dataframe"] = "dict", - count_limit: 
typing.Optional[pydantic.PositiveInt] = 100, - start_index: typing.Optional[pydantic.PositiveInt] = 0, + output_format: typing.Literal["dict", "objects", "dataframe"] = "objects", + count_limit: pydantic.PositiveInt | None = 100, + start_index: pydantic.NonNegativeInt = 0, show_shared: bool = False, - ) -> typing.Union[ - DataFrame, list[dict[str, typing.Union[int, str, float, None]]], None - ]: + ) -> DataFrame | typing.Generator[tuple[str, Run], None, None] | None: """Retrieve all runs matching filters. Parameters @@ -315,9 +204,9 @@ def get_runs( alerts : bool, optional whether to include alert information in the response. Default False. - output_format : Literal['dict', 'dataframe'], optional - the structure of the response, either a dictionary or a dataframe. - Default is 'dict'. Pandas must be installed for 'dataframe'. + output_format : Literal['objects', 'dataframe'], optional + the structure of the response, either a dictionary of objects or a dataframe. + Default is 'objects'. Pandas must be installed for 'dataframe'. count_limit : int, optional maximum number of entries to return. Default is 100. 
start_index : int, optional @@ -327,7 +216,7 @@ def get_runs( Returns ------- - dict | pandas.DataFrame + pandas.DataFrame | Generator[tuple[str, Run], None, None] either the JSON response from the runs request or the results in the form of a Pandas DataFrame @@ -341,21 +230,33 @@ def get_runs( if not show_shared: filters = (filters or []) + ["user == self"] - params = { + _runs = Run.get( + count=count_limit, + offset=start_index, + filters=json.dumps(filters), + return_basic=True, + return_metrics=metrics, + return_alerts=alerts, + return_system=system, + return_metadata=metadata, + ) + + if output_format == "objects": + return _runs + + _params: dict[str, bool | str] = { "filters": json.dumps(filters), "return_basic": True, "return_metrics": metrics, "return_alerts": alerts, "return_system": system, "return_metadata": metadata, - "count": count_limit, - "start": start_index, } response = requests.get( - f"{self._user_config.server.url}/api/runs", + f"{self._user_config.server.url}/runs", headers=self._headers, - params=params, + params=_params, ) response.raise_for_status() @@ -363,28 +264,23 @@ def get_runs( if output_format not in ("dict", "dataframe"): raise ValueError("Invalid format specified") - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario="Run retrieval", response=response, ) - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during retrieval of runs " - f"but got '{type(json_response)}'" - ) + if (response_data := json_response.get("data")) is None: + raise RuntimeError("Failed to retrieve runs data") - if (response_data := json_response.get("data")) is not None: + if output_format == "dict": return response_data - elif output_format == "dataframe": - return to_dataframe(response.json()) - else: - raise RuntimeError("Failed to retrieve runs data") + + return to_dataframe(response_data) @prettify_pydantic 
@pydantic.validate_call - def delete_run(self, run_id: str) -> typing.Optional[dict]: + def delete_run(self, run_id: str) -> dict | None: """Delete run by identifier Parameters @@ -402,29 +298,30 @@ def delete_run(self, run_id: str) -> typing.Optional[dict]: RuntimeError if the deletion failed due to server request error """ + return Run(identifier=run_id).delete() or None - response = requests.delete( - f"{self._user_config.server.url}/api/runs/{run_id}", - headers=self._headers, - ) + def _get_folder_from_path(self, path: str) -> Folder | None: + """Retrieve folder for the specified path if found - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Deletion of run '{run_id}'", - response=response, - ) - - logger.debug(f"Run '{run_id}' deleted successfully") + Parameters + ---------- + path : str + the path to search for - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during run deletion " - f"but got '{type(json_response)}'" - ) + Returns + ------- + Folder | None + if a match is found, return the folder + """ + _folders = Folder.get(filters=json.dumps([f"path == {path}"])) - return json_response or None + try: + _, _folder = next(_folders) + return _folder # type: ignore + except StopIteration: + return None - def _get_folder_id_from_path(self, path: str) -> typing.Optional[str]: + def _get_folder_id_from_path(self, path: str) -> str | None: """Retrieve folder identifier for the specified path if found Parameters @@ -437,28 +334,15 @@ def _get_folder_id_from_path(self, path: str) -> typing.Optional[str]: str | None if a match is found, return the identifier of the folder """ - params: dict[str, str] = {"filters": json.dumps([f"path == {path}"])} + _ids = Folder.ids(filters=json.dumps([f"path == {path}"])) - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/folders", - headers=self._headers, - params=params, - ) - - if ( - 
response.status_code == http.HTTPStatus.OK - and (response_data := response.json().get("data")) - and (identifier := response_data[0].get("id")) - ): - return identifier - - return None + return _ids[0] if _ids else None @prettify_pydantic @pydantic.validate_call def delete_runs( self, folder_path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] - ) -> typing.Optional[list]: + ) -> list | None: """Delete runs in a named folder Parameters @@ -477,30 +361,10 @@ def delete_runs( RuntimeError if deletion fails due to server request error """ - folder_id = self._get_folder_id_from_path(folder_path) - - if not folder_id: + if not (_folder := self._get_folder_from_path(folder_path)): raise ValueError(f"Could not find a folder matching '{folder_path}'") - - params: dict[str, bool] = {"runs_only": True, "runs": True} - - response = requests.delete( - f"{self._user_config.server.url}/api/folders/{folder_id}", - headers=self._headers, - params=params, - ) - - if response.status_code == http.HTTPStatus.OK: - if runs := response.json().get("runs", []): - logger.debug(f"Runs from '{folder_path}' deleted successfully: {runs}") - else: - logger.debug("Folder empty, no runs deleted.") - return runs - - raise RuntimeError( - f"Deletion of runs from folder '{folder_path}' failed" - f"with code {response.status_code}: {response.text}" - ) + _delete = _folder.delete(runs_only=True, delete_runs=True, recursive=False) + return _delete.get("runs", []) @prettify_pydantic @pydantic.validate_call @@ -510,7 +374,7 @@ def delete_folder( recursive: bool = False, remove_runs: bool = False, allow_missing: bool = False, - ) -> typing.Optional[list]: + ) -> list | None: """Delete a folder by name Parameters @@ -545,30 +409,11 @@ def delete_folder( raise RuntimeError( f"Deletion of folder '{folder_path}' failed, folder does not exist." 
) - - params: dict[str, bool] = {"runs": True} if remove_runs else {} - params |= {"recursive": recursive} - - response = requests.delete( - f"{self._user_config.server.url}/api/folders/{folder_id}", - headers=self._headers, - params=params, + _response = Folder(identifier=folder_id).delete( + delete_runs=remove_runs, recursive=recursive, runs_only=False ) - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Deletion of folder '{folder_path}'", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during folder deletion " - f"but got '{type(json_response)}'" - ) - - runs: list[dict] = json_response.get("runs", []) - return runs + return _response.get("runs", []) @prettify_pydantic @pydantic.validate_call @@ -580,23 +425,11 @@ def delete_alert(self, alert_id: str) -> None: alert_id : str the unique identifier for the alert """ - response = requests.delete( - f"{self._user_config.server.url}/api/alerts/{alert_id}", - headers=self._headers, - ) - - if response.status_code == http.HTTPStatus.OK: - logger.debug(f"Alert '{alert_id}' deleted successfully") - return - - raise RuntimeError( - f"Deletion of alert '{alert_id}' failed" - f"with code {response.status_code}: {response.text}" - ) + Alert(identifier=alert_id).delete() # type: ignore @prettify_pydantic @pydantic.validate_call - def list_artifacts(self, run_id: str) -> list[dict[str, typing.Any]]: + def list_artifacts(self, run_id: str) -> typing.Generator[Artifact, None, None]: """Retrieve artifacts for a given run Parameters @@ -604,70 +437,30 @@ def list_artifacts(self, run_id: str) -> list[dict[str, typing.Any]]: run_id : str unique identifier for the run - Returns - ------- - list[dict[str, typing.Any]] - list of relevant artifacts + Yields + ------ + str, Artifact + ID and artifact entry for relevant artifacts Raises ------ RuntimeError if retrieval of 
artifacts failed when communicating with the server """ - params: dict[str, str] = {"runs": json.dumps([run_id])} - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/artifacts", - headers=self._headers, - params=params, - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of artifacts for run '{run_id}", - response=response, - ) - - if not isinstance(json_response, list): - raise RuntimeError( - "Expected list of entries from JSON response during artifact " - f"retrieval but got '{type(json_response)}'" - ) - return json_response - - def _retrieve_artifact_from_server( - self, - run_id: str, - name: str, - ) -> typing.Union[dict, list]: - params: dict[str, str | None] = {"name": name} - - response = requests.get( - f"{self._user_config.server.url}/api/runs/{run_id}/artifacts", - headers=self._headers, - params=params, - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of artifact '{name}' for run '{run_id}'", - response=response, - ) - - if isinstance(json_response, dict) and (detail := json_response.get("detail")): - raise RuntimeError(f"Failed to retrieve artifact '{name}': {detail}") - - if not isinstance(json_response, list): - raise RuntimeError( - "Expected list from JSON response during retrieval of " - f"artifact but got '{type(json_response)}'" - ) + return Artifact.get(runs=json.dumps([run_id])) # type: ignore - return json_response + def _retrieve_artifacts_from_server( + self, run_id: str, name: str, count: int | None = None + ) -> typing.Generator[tuple[str, Artifact], None, None]: + return Artifact.get( + runs=json.dumps([run_id]), + filters=json.dumps([f"name == {name}"]), + count=count, + ) # type: ignore @prettify_pydantic @pydantic.validate_call - def abort_run(self, run_id: str, reason: str) -> typing.Union[dict, list]: + def abort_run(self, run_id: str, 
reason: str) -> dict | list: """Abort a currently active run on the server Parameters @@ -682,27 +475,7 @@ def abort_run(self, run_id: str, reason: str) -> typing.Union[dict, list]: dict | list response from server """ - body: dict[str, str | None] = {"id": run_id, "reason": reason} - - response = requests.put( - f"{self._user_config.server.url}/api/runs/abort", - headers=self._headers, - json=body, - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Abort of run '{run_id}'", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected list from JSON response during retrieval of " - f"artifact but got '{type(json_response)}'" - ) - - return json_response + return Run(identifier=run_id).abort(reason=reason) @prettify_pydantic @pydantic.validate_call @@ -731,31 +504,30 @@ def get_artifact( RuntimeError if retrieval of artifact from the server failed """ - json_response = self._retrieve_artifact_from_server(run_id, name) - - if not json_response: - return None - - url = json_response[0]["url"] - mimetype = json_response[0]["type"] - url = json_response[0]["url"] - mimetype = json_response[0]["type"] + _artifact = Artifact.from_name( + run_id=run_id, + name=name, + server_url=self._user_config.server.url, + server_token=self._user_config.server.token, + ) - response = requests.get(url, timeout=DOWNLOAD_TIMEOUT) - response.raise_for_status() + _content = b"".join(_artifact.download_content()) - content: typing.Optional[DeserializedContent] = deserialize_data( - response.content, mimetype, allow_pickle + _deserialized_content: DeserializedContent | None = deserialize_data( + _content, _artifact.mime_type, allow_pickle ) # Numpy array return means just 'if content' will be ambiguous # so must explicitly check if None - return response.content if content is None else content + return _content if _deserialized_content is None else _deserialized_content 
@prettify_pydantic @pydantic.validate_call def get_artifact_as_file( - self, run_id: str, name: str, path: typing.Optional[str] = None + self, + run_id: str, + name: str, + output_dir: pydantic.DirectoryPath | None = None, ) -> None: """Retrieve the specified artifact in the form of a file @@ -767,7 +539,7 @@ def get_artifact_as_file( unique identifier for the run to be queried name : str the name of the artifact to be retrieved - path : str | None, optional + output_dir: str | None, optional path to download retrieved content to, the default of None uses the current working directory. @@ -777,83 +549,25 @@ def get_artifact_as_file( if there was a failure during retrieval of information from the server """ - json_response = self._retrieve_artifact_from_server(run_id, name) - - if not json_response: - raise RuntimeError( - f"Failed to download artifact '{name}' from run '{run_id}'," - " no results found." - ) - - if not (url := json_response[0].get("url")): - raise RuntimeError( - "Failed to download artifacts, " - "expected URL for retrieval but server " - "did not return result" - ) - - downloader( - { - "url": url, - "filename": os.path.basename(name), - "path": path or os.getcwd(), - } - ) + _artifacts = self._retrieve_artifacts_from_server(run_id, name, count=1) - def _assemble_artifact_downloads( - self, - request_response: requests.Response, - startswith: typing.Optional[str], - endswith: typing.Optional[str], - contains: typing.Optional[str], - out_path: str, - ) -> list[dict[str, str]]: - downloads: list[dict[str, str]] = [] - - for item in request_response.json(): - for key in ("url", "name"): - if key not in item: - raise RuntimeError( - f"Expected key '{key}' in request " - "response during file retrieval" - ) - - if startswith and not item["name"].startswith(startswith): - continue - if contains and contains not in item["name"]: - continue - if endswith and not item["name"].endswith(endswith): - continue - - file_name: str = 
os.path.basename(item["name"]) - file_dir: str = os.path.join(out_path, os.path.dirname(item["name"])) - - job: dict[str, str] = { - "url": item["url"], - "filename": file_name, - "path": file_dir, - } - - if os.path.isfile(file_path := os.path.join(file_dir, file_name)): - logger.warning(f"File '{file_path}' exists, skipping") - continue - - os.makedirs(job["path"], exist_ok=True) - - downloads.append(job) + try: + _id, _artifact = next(_artifacts) + except StopIteration as e: + raise ValueError(f"No artifact '{name}' found for run '{run_id}'") from e - return downloads + _download_artifact_to_file(_artifact, output_dir) @prettify_pydantic @pydantic.validate_call def get_artifacts_as_files( self, run_id: str, - category: typing.Optional[typing.Literal["input", "output", "code"]] = None, - path: typing.Optional[str] = None, - startswith: typing.Optional[str] = None, - contains: typing.Optional[str] = None, - endswith: typing.Optional[str] = None, + category: typing.Literal["input", "output", "code"] | None = None, + output_dir: pydantic.DirectoryPath | None = None, + startswith: str | None = None, + contains: str | None = None, + endswith: str | None = None, ) -> None: """Retrieve artifacts from the given run as a set of files @@ -861,7 +575,7 @@ def get_artifacts_as_files( ---------- run_id : str the unique identifier for the run - path : str | None, optional + output_dir : str | None, optional location to download files to, the default of None will download them to the current working directory startswith : str, optional @@ -876,43 +590,29 @@ def get_artifacts_as_files( RuntimeError if there was a failure retrieving artifacts from the server """ - params: dict[str, typing.Optional[str]] = {"category": category} - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/runs/{run_id}/artifacts", - headers=self._headers, - params=params, - ) - - self._get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Download 
of artifacts for run '{run_id}'", - response=response, - ) - - downloads: list[dict[str, str]] = self._assemble_artifact_downloads( - request_response=response, - startswith=startswith, - endswith=endswith, - contains=contains, - out_path=path or os.getcwd(), - ) + _artifacts: typing.Generator[tuple[str, Artifact], None, None] = Artifact.get( + runs=json.dumps([run_id]), category=category + ) # type: ignore with ThreadPoolExecutor(CONCURRENT_DOWNLOADS) as executor: - futures = [executor.submit(downloader, item) for item in downloads] - for future, download in zip(as_completed(futures), downloads): + futures = [ + executor.submit(_download_artifact_to_file, artifact, output_dir) + for _, artifact in _artifacts + ] + for future, (_, artifact) in zip(as_completed(futures), _artifacts): try: future.result() except Exception as e: raise RuntimeError( - f"Download of file {download['url']} failed with exception: {e}" - ) + f"Download of file {artifact.storage_url} " + f"failed with exception: {e}" + ) from e @prettify_pydantic @pydantic.validate_call def get_folder( self, folder_path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> Folder | None: """Retrieve a folder by identifier Parameters @@ -923,7 +623,7 @@ def get_folder( Returns ------- - dict[str, typing.Any] | None + Folder | None data for the requested folder if it exists else None Raises @@ -931,18 +631,24 @@ def get_folder( RuntimeError if there was a failure when retrieving information from the server """ - if not (_folders := self.get_folders(filters=[f"path == {folder_path}"])): + _folders: typing.Generator[tuple[str, Folder], None, None] = Folder.get( + filters=json.dumps([f"path == {folder_path}"]) + ) # type: ignore + + try: + _, _folder = next(_folders) + return _folder + except StopIteration: return None - return _folders[0] @pydantic.validate_call def get_folders( self, *, - filters: typing.Optional[list[str]] = None, + filters: 
list[str] | None = None, count: pydantic.PositiveInt = 100, start_index: pydantic.NonNegativeInt = 0, - ) -> list[dict[str, typing.Any]]: + ) -> typing.Generator[tuple[str, Folder], None, None]: """Retrieve folders from the server Parameters @@ -956,48 +662,21 @@ def get_folders( Returns ------- - list[dict[str, Any]] - all data for folders matching the filter request + Generator[str, Folder] + all data for folders matching the filter request in form (id, Folder) Raises ------ RuntimeError if there was a failure retrieving data from the server """ - params: dict[str, typing.Union[str, int]] = { - "filters": json.dumps(filters or []), - "count": count, - "start": start_index, - } - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/folders", - headers=self._headers, - params=params, - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario="Retrieval of folders", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during folder retrieval " - f"but got '{type(json_response)}'" - ) - - if (data := json_response.get("data")) is None: - raise RuntimeError( - "Expected key 'data' in response during folder retrieval" - ) - - return data + return Folder.get( + filters=json.dumps(filters or []), count=count, offset=start_index + ) # type: ignore @prettify_pydantic @pydantic.validate_call - def get_metrics_names(self, run_id: str) -> list[str]: + def get_metrics_names(self, run_id: str) -> typing.Generator[str, None, None]: """Return information on all metrics within a run Parameters @@ -1007,7 +686,7 @@ def get_metrics_names(self, run_id: str) -> list[str]: Returns ------- - list[str] + Generator[str, None, None] names of metrics in the given run Raises @@ -1015,27 +694,10 @@ def get_metrics_names(self, run_id: str) -> list[str]: RuntimeError if there was a failure retrieving information from the server """ - 
params = {"runs": json.dumps([run_id])} - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/metrics/names", - headers=self._headers, - params=params, - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Request for metric names for run '{run_id}'", - response=response, - ) + _run = Run(identifier=run_id) - if not isinstance(json_response, list): - raise RuntimeError( - "Expected list from JSON response during folder retrieval " - f"but got '{type(json_response)}'" - ) - - return json_response + for id, _ in _run.metrics: + yield id def _get_run_metrics_from_server( self, @@ -1043,9 +705,9 @@ def _get_run_metrics_from_server( run_ids: list[str], xaxis: str, aggregate: bool, - max_points: typing.Optional[int] = None, + max_points: int | None = None, ) -> dict[str, typing.Any]: - params: dict[str, typing.Union[str, int, None]] = { + params: dict[str, str | int | None] = { "runs": json.dumps(run_ids), "aggregate": aggregate, "metrics": json.dumps(metric_names), @@ -1054,24 +716,17 @@ def _get_run_metrics_from_server( } metrics_response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/metrics", + f"{self._user_config.server.url}/metrics", headers=self._headers, params=params, ) - json_response = self._get_json_from_response( + return get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Retrieval of metrics '{metric_names}' in runs '{run_ids}'", response=metrics_response, ) - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response for metric retrieval" - ) - - return json_response - @prettify_pydantic @pydantic.validate_call def get_metric_values( @@ -1080,12 +735,12 @@ def get_metric_values( xaxis: typing.Literal["step", "time", "timestamp"], *, output_format: typing.Literal["dataframe", "dict"] = "dict", - run_ids: typing.Optional[list[str]] = None, - run_filters: typing.Optional[list[str]] 
= None, + run_ids: list[str] | None = None, + run_filters: list[str] | None = None, use_run_names: bool = False, aggregate: bool = False, - max_points: typing.Optional[pydantic.PositiveInt] = None, - ) -> typing.Union[dict, DataFrame, None]: + max_points: pydantic.PositiveInt | None = None, + ) -> dict | DataFrame | None: """Retrieve the values for a given metric across multiple runs Uses filters to specify which runs should be retrieved. @@ -1135,52 +790,34 @@ def get_metric_values( "'xaxis=timestamp'" ) - if run_filters is not None: - if not (filtered_runs := self.get_runs(filters=run_filters)): - return None + _args = {"filters": json.dumps(run_filters)} if run_filters else {} - run_ids = [run["id"] for run in filtered_runs if run["id"]] + _run_data = dict(Run.get(**_args)) - if use_run_names: - run_labels = [run["name"] for run in filtered_runs] - elif run_ids is not None: - if use_run_names: - run_labels = [ - self.get_run_name_from_id(run_id) for run_id in run_ids if run_id - ] - else: - raise AssertionError( - "Expected either argument 'run_ids' or 'run_filters' for get_metric_values" - ) - - if not run_ids or any(not i for i in run_ids): - raise ValueError( - f"Expected list of run identifiers for 'run_ids' but got '{run_ids}'" + if not ( + _run_metrics := self._get_run_metrics_from_server( + metric_names=metric_names, + run_ids=run_ids or list(_run_data.keys()), + xaxis=xaxis, + aggregate=aggregate, + max_points=max_points, ) - - if not use_run_names: - run_labels = run_ids - - # Now get the metrics for each run - run_metrics = self._get_run_metrics_from_server( - metric_names=metric_names, - run_ids=run_ids, - xaxis=xaxis, - aggregate=aggregate, - max_points=max_points, - ) - - if not run_metrics: + ): return None - if aggregate: return aggregated_metrics_to_dataframe( - run_metrics, xaxis=xaxis, parse_to=output_format - ) - else: - return parse_run_set_metrics( - run_metrics, xaxis=xaxis, run_labels=run_labels, parse_to=output_format + _run_metrics, 
xaxis=xaxis, parse_to=output_format ) + if use_run_names: + _run_metrics = { + _run_data[key].name: _run_metrics[key] for key in _run_metrics.keys() + } + return parse_run_set_metrics( + _run_metrics, + xaxis=xaxis, + run_labels=list(_run_metrics.keys()), + parse_to=output_format, + ) @check_extra("plot") @prettify_pydantic @@ -1191,7 +828,7 @@ def plot_metrics( metric_names: list[str], xaxis: typing.Literal["step", "time"], *, - max_points: typing.Optional[int] = None, + max_points: int | None = None, ) -> typing.Any: """Plt the time series values for multiple metrics/runs @@ -1273,9 +910,9 @@ def get_events( self, run_id: str, *, - message_contains: typing.Optional[str] = None, - start_index: typing.Optional[pydantic.NonNegativeInt] = None, - count_limit: typing.Optional[pydantic.PositiveInt] = None, + message_contains: str | None = None, + start_index: pydantic.NonNegativeInt | None = None, + count_limit: pydantic.PositiveInt | None = None, ) -> list[dict[str, str]]: """Return events for a specified run @@ -1307,7 +944,7 @@ def get_events( else "" ) - params: dict[str, typing.Union[str, int]] = { + params: dict[str, str | int] = { "run": run_id, "filters": msg_filter, "start": start_index or 0, @@ -1315,35 +952,30 @@ def get_events( } response = requests.get( - f"{self._user_config.server.url}/api/events", + f"{self._user_config.server.url}/events", headers=self._headers, params=params, ) - json_response = self._get_json_from_response( + json_response = get_json_from_response( expected_status=[http.HTTPStatus.OK], scenario=f"Retrieval of events for run '{run_id}'", response=response, ) - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response when retrieving events" - ) - - return response.json().get("data", []) + return json_response.get("data", []) @prettify_pydantic @pydantic.validate_call def get_alerts( self, *, - run_id: typing.Optional[str] = None, + run_id: str | None = None, critical_only: bool = True, 
names_only: bool = True, - start_index: typing.Optional[pydantic.NonNegativeInt] = None, - count_limit: typing.Optional[pydantic.PositiveInt] = None, - ) -> list[dict[str, typing.Any]]: + start_index: pydantic.NonNegativeInt | None = None, + count_limit: pydantic.PositiveInt | None = None, + ) -> list[AlertBase] | list[str | None]: """Retrieve alerts for a given run Parameters @@ -1369,77 +1001,26 @@ def get_alerts( RuntimeError if there was a failure retrieving data from the server """ - params: dict[str, int] = {"count": count_limit or 0, "start": start_index or 0} - if not run_id: - response = requests.get( - f"{self._user_config.server.url}/api/alerts/", - headers=self._headers, - params=params, - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Retrieval of alerts for run '{run_id}'", - response=response, - ) - else: - response = requests.get( - f"{self._user_config.server.url}/api/runs/{run_id}", - headers=self._headers, - params=params, - ) - - json_response = self._get_json_from_response( - expected_status=[200], - scenario=f"Retrieval of alerts for run '{run_id}'", - response=response, - ) - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response when retrieving alerts" - ) - - if run_id and (alerts := json_response.get("alerts")) is None: - raise RuntimeError( - "Expected key 'alerts' in response when retrieving " - f"alerts for run '{run_id}': {json_response}" - ) - elif not run_id and (alerts := json_response.get("data")) is None: - raise RuntimeError( - "Expected key 'data' in response when retrieving " - f"alerts: {json_response}" - ) - - if run_id and critical_only: - if names_only: - return [ - alert["alert"].get("name") - for alert in alerts - if alert["status"].get("current") == "critical" - ] - else: - return [ - alert - for alert in alerts - if alert["status"].get("current") == "critical" - ] - if names_only: - if run_id: - return 
[alert["alert"].get("name") for alert in alerts] - else: - return [alert.get("name") for alert in alerts] + if not run_id: + return [alert.name if names_only else alert for _, alert in Alert.get()] # type: ignore - return alerts + return [ + alert.get("name") + if names_only + else Alert(identifier=alert.get("id"), **alert) + for alert in Run(identifier=run_id).get_alert_details() + if not critical_only or alert["status"].get("current") == "critical" + ] # type: ignore @prettify_pydantic @pydantic.validate_call def get_tags( self, *, - start_index: typing.Optional[pydantic.NonNegativeInt] = None, - count_limit: typing.Optional[pydantic.PositiveInt] = None, - ) -> list[dict]: + start_index: pydantic.NonNegativeInt | None = None, + count_limit: pydantic.PositiveInt | None = None, + ) -> typing.Generator[Tag, None, None]: """Retrieve tags Parameters @@ -1451,34 +1032,15 @@ def get_tags( Returns ------- - list[dict[str, Any]] - a list of all tags for this run which match the constrains specified + list[Tag] + a list of all tags for this run Raises ------ RuntimeError if there was a failure retrieving data from the server """ - params = {"count": count_limit or 0, "start": start_index or 0} - response = requests.get( - f"{self._user_config.server.url}/api/tags", - headers=self._headers, - params=params, - ) - - json_response = self._get_json_from_response( - expected_status=[200], - scenario="Retrieval of tags", - response=response, - ) - - if not isinstance(json_response, dict): - raise RuntimeError("Expected list from JSON response when retrieving tags") - - if not (data := json_response.get("data")): - raise RuntimeError("Expected key 'data' in response during tags retrieval") - - return data + return Tag.get(count=count_limit, offset=start_index) @prettify_pydantic @pydantic.validate_call @@ -1495,31 +1057,12 @@ def delete_tag(self, tag_id: str) -> None: RuntimeError if the deletion failed due to a server request error """ - - response = requests.delete( - 
f"{self._user_config.server.url}/api/tags/{tag_id}", - headers=self._headers, - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK], - scenario=f"Deletion of tag '{tag_id}'", - response=response, - ) - - logger.debug(f"Tag '{tag_id}' deleted successfully") - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during run deletion " - f"but got '{type(json_response)}'" - ) - - return json_response or None + with contextlib.suppress(ValueError): + Tag(identifier=tag_id).delete() @prettify_pydantic @pydantic.validate_call - def get_tag(self, tag_id: str) -> typing.Optional[dict[str, typing.Any]]: + def get_tag(self, tag_id: str) -> Tag | None: """Retrieve a single tag Parameters @@ -1529,7 +1072,7 @@ def get_tag(self, tag_id: str) -> typing.Optional[dict[str, typing.Any]]: Returns ------- - dict[str, Any] + Tag response containing information on the given tag Raises @@ -1537,23 +1080,7 @@ def get_tag(self, tag_id: str) -> typing.Optional[dict[str, typing.Any]]: RuntimeError if retrieval of information from the server on this tag failed """ - - response: requests.Response = requests.get( - f"{self._user_config.server.url}/api/tag/{tag_id}", headers=self._headers - ) - - json_response = self._get_json_from_response( - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of tag '{tag_id}'", - response=response, - ) - - if response.status_code == http.HTTPStatus.NOT_FOUND: + try: + return Tag(identifier=tag_id) + except ObjectNotFoundError: return None - - if not isinstance(json_response, dict): - raise RuntimeError( - "Expected dictionary from JSON response during tag retrieval " - f"but got '{type(json_response)}'" - ) - return json_response diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 775cd625..472811f6 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -14,6 +14,7 @@ import 
simvue.models as sv_models from simvue.utilities import get_expiry +from simvue.api.url import URL logger = logging.getLogger(__file__) @@ -25,8 +26,11 @@ class ServerSpecifications(pydantic.BaseModel): @pydantic.field_validator("url") @classmethod - def url_to_str(cls, v: typing.Any) -> str: - return f"{v}" + def url_to_api_url(cls, v: typing.Any) -> str: + if f"{v}".endswith("/api"): + return f"{v}" + _url = URL(f"{v}") / "api" + return f"{_url}" @pydantic.field_validator("token") def check_token(cls, v: typing.Any) -> str: @@ -35,24 +39,25 @@ def check_token(cls, v: typing.Any) -> str: raise AssertionError("Failed to parse Simvue token - invalid token form") if time.time() - expiry > 0: raise AssertionError("Simvue token has expired") - return value + return v class OfflineSpecifications(pydantic.BaseModel): - cache: typing.Optional[pathlib.Path] = None + cache: pathlib.Path | None = None - @pydantic.field_validator("cache") - @classmethod - def cache_to_str(cls, v: typing.Any) -> str: - return f"{v}" + +class MetricsSpecifications(pydantic.BaseModel): + resources_metrics_interval: pydantic.PositiveInt | None = -1 + emission_metrics_interval: pydantic.PositiveInt | None = None + enable_emission_metrics: bool = False class DefaultRunSpecifications(pydantic.BaseModel): - name: typing.Optional[str] = None - description: typing.Optional[str] = None - tags: typing.Optional[list[str]] = None + name: str | None = None + description: str | None = None + tags: list[str] | None = None folder: str = pydantic.Field("/", pattern=sv_models.FOLDER_REGEX) - metadata: typing.Optional[dict[str, typing.Union[str, int, float, bool]]] = None + metadata: dict[str, str | int | float | bool] | None = None mode: typing.Literal["offline", "disabled", "online"] = "online" diff --git a/simvue/config/user.py b/simvue/config/user.py index 8acba551..931d2fa5 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -20,6 +20,7 @@ from simvue.config.parameters import ( 
ClientGeneralOptions, DefaultRunSpecifications, + MetricsSpecifications, ServerSpecifications, OfflineSpecifications, ) @@ -30,14 +31,13 @@ DEFAULT_OFFLINE_DIRECTORY, ) from simvue.version import __version__ -from simvue.api import get +from simvue.api.request import get as sv_get +from simvue.api.url import URL logger = logging.getLogger(__name__) -SIMVUE_SERVER_UPPER_CONSTRAINT: typing.Optional[semver.Version] = semver.Version.parse( - "1.0.0" -) -SIMVUE_SERVER_LOWER_CONSTRAINT: typing.Optional[semver.Version] = None +SIMVUE_SERVER_UPPER_CONSTRAINT: semver.Version | None = semver.Version.parse("2.0.0") +SIMVUE_SERVER_LOWER_CONSTRAINT: semver.Version | None = semver.Version.parse("1.0.0") class SimvueConfiguration(pydantic.BaseModel): @@ -49,21 +49,22 @@ class SimvueConfiguration(pydantic.BaseModel): ) run: DefaultRunSpecifications = DefaultRunSpecifications() offline: OfflineSpecifications = OfflineSpecifications() + metrics: MetricsSpecifications = MetricsSpecifications() @classmethod - def _load_pyproject_configs(cls) -> typing.Optional[dict]: + def _load_pyproject_configs(cls) -> dict | None: """Recover any Simvue non-authentication configurations from pyproject.toml""" _pyproject_toml = sv_util.find_first_instance_of_file( file_names=["pyproject.toml"], check_user_space=False ) if not _pyproject_toml: - return + return None _project_data = toml.load(_pyproject_toml) if not (_simvue_setup := _project_data.get("tool", {}).get("simvue")): - return + return None # Do not allow reading of authentication credentials within a project file _server_credentials = _simvue_setup.get("server", {}) @@ -95,18 +96,21 @@ def _check_server( "User-Agent": f"Simvue Python client {__version__}", } try: - response = get(f"{url}/api/version", headers) + _url = URL(url) / "version" + _response = sv_get(f"{_url}", headers) - if response.status_code != http.HTTPStatus.OK or not ( - _version_str := response.json().get("version") + if _response.status_code != http.HTTPStatus.OK or not ( 
+ _version_str := _response.json().get("version") ): raise AssertionError - if response.status_code == http.HTTPStatus.UNAUTHORIZED: + if _response.status_code == http.HTTPStatus.UNAUTHORIZED: raise AssertionError("Unauthorised token") except Exception as err: - raise AssertionError(f"Exception retrieving server version: {str(err)}") + raise AssertionError( + f"Exception retrieving server version:\n {str(err)}" + ) from err _version = semver.Version.parse(_version_str) @@ -124,6 +128,11 @@ def _check_server( f"< {SIMVUE_SERVER_LOWER_CONSTRAINT}" ) + @pydantic.validate_call + def write(self, out_directory: pydantic.DirectoryPath) -> None: + with out_directory.joinpath(CONFIG_FILE_NAMES[0]).open("w") as out_f: + toml.dump(self.model_dump(), out_f) + @pydantic.model_validator(mode="after") @classmethod def check_valid_server(cls, values: "SimvueConfiguration") -> bool: @@ -138,9 +147,9 @@ def check_valid_server(cls, values: "SimvueConfiguration") -> bool: @sv_util.prettify_pydantic def fetch( cls, - server_url: typing.Optional[str] = None, - server_token: typing.Optional[str] = None, - mode: typing.Optional[typing.Literal["offline", "online", "disabled"]] = None, + server_url: str | None = None, + server_token: str | None = None, + mode: typing.Literal["offline", "online", "disabled"] | None = None, ) -> "SimvueConfiguration": """Retrieve the Simvue configuration from this project @@ -149,7 +158,7 @@ def fetch( Parameters ---------- - server_url : str, optional + server_url : str | URL, optional override the URL used for this session server_token : str, optional override the token used for this session @@ -165,8 +174,6 @@ def fetch( _config_dict: dict[str, dict[str, str]] = cls._load_pyproject_configs() or {} try: - logger.info(f"Using config file '{cls.config_file()}'") - # NOTE: Legacy INI support has been removed _config_dict |= toml.load(cls.config_file()) @@ -190,17 +197,20 @@ def fetch( _config_dict["offline"]["cache"] = _default_dir # Ranking of configurations 
for token and URl is: - # Envionment Variables > Run Definition > Configuration File + # Environment Variables > Run Definition > Configuration File _server_url = os.environ.get( "SIMVUE_URL", server_url or _config_dict["server"].get("url") ) + if isinstance(_server_url, URL): + _server_url = str(_server_url) + _server_token = os.environ.get( "SIMVUE_TOKEN", server_token or _config_dict["server"].get("token") ) - _run_mode = mode or _config_dict["run"].get("mode") + _run_mode = mode or _config_dict["run"].get("mode") or "online" if not _server_url: raise RuntimeError("No server URL was specified") @@ -218,10 +228,8 @@ def fetch( @functools.lru_cache def config_file(cls) -> pathlib.Path: """Returns the path of top level configuration file used for the session""" - _config_file: typing.Optional[pathlib.Path] = ( - sv_util.find_first_instance_of_file( - CONFIG_FILE_NAMES, check_user_space=True - ) + _config_file: pathlib.Path | None = sv_util.find_first_instance_of_file( + CONFIG_FILE_NAMES, check_user_space=True ) # NOTE: Legacy INI support has been removed diff --git a/simvue/converters.py b/simvue/converters.py index cdc3135b..02d1dbcb 100644 --- a/simvue/converters.py +++ b/simvue/converters.py @@ -8,6 +8,8 @@ import typing import pandas +import flatdict + if typing.TYPE_CHECKING: from pandas import DataFrame @@ -17,9 +19,7 @@ def aggregated_metrics_to_dataframe( request_response_data: dict[str, list[dict[str, float]]], xaxis: str, parse_to: typing.Literal["dict", "dataframe"] = "dict", -) -> typing.Union[ - "DataFrame", dict[str, dict[tuple[float, str], typing.Optional[float]]] -]: +) -> typing.Union["DataFrame", dict[str, dict[tuple[float, str], float]] | None]: """Create data frame for an aggregate of metrics Returns a dataframe with columns being metrics and sub-columns being the @@ -42,14 +42,12 @@ def aggregated_metrics_to_dataframe( """ _all_steps: list[float] = sorted( - set( - ( - d[xaxis] - for sublist in request_response_data.values() - for d in sublist 
- if xaxis in d - ) - ) + { + d[xaxis] + for sublist in request_response_data.values() + for d in sublist + if xaxis in d + } ) # Get the keys from the aggregate which are not the xaxis label @@ -58,8 +56,8 @@ def aggregated_metrics_to_dataframe( _value_types = list(_value_types) _value_types.remove(xaxis) - result_dict: dict[str, dict[tuple[float, str], typing.Optional[float]]] = { - metric_name: {} for metric_name in request_response_data.keys() + result_dict: dict[str, dict[tuple[float, str], float]] | None = { + metric_name: {} for metric_name in request_response_data } for metric_name, metrics in request_response_data.items(): @@ -91,9 +89,7 @@ def parse_run_set_metrics( xaxis: str, run_labels: list[str], parse_to: typing.Literal["dict", "dataframe"] = "dict", -) -> typing.Union[ - dict[str, dict[tuple[float, str], typing.Optional[float]]], "DataFrame" -]: +) -> typing.Union[dict[str, dict[tuple[float, str], float]] | None, "DataFrame"]: """Parse JSON response metric data from the server into the specified form Creates either a dictionary or a pandas dataframe of the data collected @@ -112,7 +108,7 @@ def parse_run_set_metrics( Returns ------- - dict[str, dict[tuple[float, str], typing.Optional[float]]] | DataFrame + dict[str, dict[tuple[float, str], float]] | None | DataFrame either a dictionary or Pandas DataFrame containing the results Raises @@ -121,31 +117,20 @@ def parse_run_set_metrics( if an unrecognised parse format is specified """ if not request_response_data: - if parse_to == "dataframe": - return pandas.DataFrame({}) - else: - return {} + return pandas.DataFrame({}) if parse_to == "dataframe" else {} _all_steps: list[float] = sorted( - set( - ( - d[xaxis] - for run_data in request_response_data.values() - for sublist in run_data.values() - for d in sublist - if xaxis in d - ) - ) + { + d[xaxis] + for run_data in request_response_data.values() + for sublist in run_data.values() + for d in sublist + if xaxis in d + } ) _all_metrics: list[str] = 
sorted( - set( - ( - key - for run_data in request_response_data.values() - for key in run_data.keys() - ) - ) + {key for run_data in request_response_data.values() for key in run_data.keys()} ) # Get the keys from the aggregate which are not the xaxis label @@ -155,7 +140,7 @@ def parse_run_set_metrics( _value_types = list(_value_types) _value_types.remove(xaxis) - result_dict: dict[str, dict[tuple[float, str], typing.Optional[float]]] = { + result_dict: dict[str, dict[tuple[float, str], float]] | None = { metric_name: {} for metric_name in _all_metrics } @@ -176,63 +161,58 @@ def parse_run_set_metrics( result_dict[metric_name][step, run_label] = next_item.get("value") if parse_to == "dataframe": - _data_frame = pandas.DataFrame( + return pandas.DataFrame( result_dict, index=pandas.MultiIndex.from_product( [_all_steps, run_labels], names=(xaxis, "run") ), ) - return _data_frame elif parse_to == "dict": return result_dict else: raise ValueError(f"Unrecognised parse format '{parse_to}'") -def to_dataframe(data): +def to_dataframe(data) -> pandas.DataFrame: """ Convert runs to dataframe """ metadata = [] - for run in data: - if "metadata" in run: - for item in run["metadata"]: - if item not in metadata: - metadata.append(item) + system_columns = [] + columns = { + "name": [], + "status": [], + "folder": [], + "created": [], + "started": [], + "ended": [], + } - columns = {} for run in data: - for item in ("name", "status", "folder", "created", "started", "ended"): - if item not in columns: - columns[item] = [] - if item in run: - columns[item].append(run[item]) - else: - columns[item].append(None) - - if "system" in run: - for section in run["system"]: - if section in ("cpu", "gpu", "platform"): - for item in run["system"][section]: - if "system.%s.%s" % (section, item) not in columns: - columns["system.%s.%s" % (section, item)] = [] - columns["system.%s.%s" % (section, item)].append( - run["system"][section][item] - ) - else: - if "system.%s" % section not in 
columns: - columns["system.%s" % section] = [] - columns["system.%s" % section].append(run["system"][section]) - - if "metadata" in run: - for item in metadata: - if "metadata.%s" % item not in columns: - columns["metadata.%s" % item] = [] - if item in run["metadata"]: - columns["metadata.%s" % item].append(run["metadata"][item]) - else: - columns["metadata.%s" % item].append(None) + for item in run.get("metadata", []): + if item not in metadata: + metadata.append(item) + for item, value in (run.get("system", {}) or {}).items(): + if isinstance(value, dict): + system_columns += [ + col_name + for sub_item in value.keys() + if (col_name := f"system.{item}.{sub_item}") not in system_columns + ] + elif f"system.{item}" not in system_columns: + system_columns.append(f"system.{item}") + + columns |= {f"metadata.{column}": [] for column in metadata} | { + column: [] for column in system_columns + } + for run in data: + run_info = flatdict.FlatDict(run, delimiter=".") + for column, value_ in columns.items(): + try: + value_.append(run_info.get(column)) + except TypeError: + value_.append(None) return pandas.DataFrame(data=columns) @@ -240,7 +220,7 @@ def to_dataframe(data): def metric_time_series_to_dataframe( data: list[dict[str, float]], xaxis: typing.Literal["step", "time", "timestamp"], - name: typing.Optional[str] = None, + name: str | None = None, ) -> "DataFrame": """Convert a single metric value set from a run into a dataframe diff --git a/simvue/eco.py b/simvue/eco.py index 3964f53f..6ff7023b 100644 --- a/simvue/eco.py +++ b/simvue/eco.py @@ -3,7 +3,6 @@ import datetime from codecarbon import EmissionsTracker -from codecarbon.external.logger import logger from codecarbon.output_methods.base_output import BaseOutput as cc_BaseOutput from simvue.utilities import simvue_timestamp @@ -12,21 +11,27 @@ from codecarbon.output_methods.emissions_data import EmissionsData +logger = logging.getLogger(__file__) + + class CodeCarbonOutput(cc_BaseOutput): def __init__(self, 
run: "Run") -> None: - self._meta_update: bool = True self._simvue_run = run self._metrics_step: int = 0 - def out(self, total: "EmissionsData", delta: "EmissionsData") -> None: + def out( + self, total: "EmissionsData", delta: "EmissionsData", meta_update: bool = True + ) -> None: # Check if the run has been shutdown, if so do nothing if ( self._simvue_run._shutdown_event and self._simvue_run._shutdown_event.is_set() ): + logger.debug("Terminating CodeCarbon tracker") return - if self._meta_update: + if meta_update: + logger.debug("Logging CodeCarbon metadata") self._simvue_run.update_metadata( { "codecarbon.country": total.country_name, @@ -35,12 +40,12 @@ def out(self, total: "EmissionsData", delta: "EmissionsData") -> None: "codecarbon.version": total.codecarbon_version, } ) - self._meta_update = False _cc_timestamp: datetime.datetime = datetime.datetime.strptime( total.timestamp, "%Y-%m-%dT%H:%M:%S" ) + logger.debug("Logging CodeCarbon metrics") self._simvue_run.log_metrics( metrics={ "codecarbon.total.emissions": total.emissions, @@ -54,7 +59,7 @@ def out(self, total: "EmissionsData", delta: "EmissionsData") -> None: self._metrics_step += 1 def live_out(self, total: "EmissionsData", delta: "EmissionsData") -> None: - self.out(total, delta) + self.out(total, delta, meta_update=False) class SimvueEmissionsTracker(EmissionsTracker): @@ -81,3 +86,4 @@ def set_measure_interval(self, interval: int) -> None: def post_init(self) -> None: self._set_from_conf(self._simvue_run._id, "experiment_id") self._set_from_conf(self._simvue_run._name, "experiment_name") + self.start() diff --git a/simvue/exception.py b/simvue/exception.py new file mode 100644 index 00000000..3dc5e65e --- /dev/null +++ b/simvue/exception.py @@ -0,0 +1,24 @@ +""" +Simvue Exception Types +====================== + +Custom exceptions for handling of Simvue request scenarions. 
+ +""" + + +class ObjectNotFoundError(Exception): + """For failure retrieving Simvue object from server""" + + def __init__(self, obj_type: str, name: str, extra: str | None = None) -> None: + super().__init__( + f"Failed to retrieve '{name}' of type '{obj_type}' " + f"{f'{extra}, ' if extra else ''}" + "no such object" + ) + + +class SimvueRunError(RuntimeError): + """A special sub-class of runtime error specifically for Simvue run errors""" + + pass diff --git a/simvue/executor.py b/simvue/executor.py index e73284b8..a6bc7c4f 100644 --- a/simvue/executor.py +++ b/simvue/executor.py @@ -35,13 +35,13 @@ def __call__(self, *, status_code: int, std_out: str, std_err: str) -> None: ... def _execute_process( proc_id: str, - command: typing.List[str], + command: list[str], runner_name: str, - completion_callback: typing.Optional[CompletionCallback] = None, - completion_trigger: typing.Optional[multiprocessing.synchronize.Event] = None, - environment: typing.Optional[typing.Dict[str, str]] = None, - cwd: typing.Optional[pathlib.Path] = None, -) -> tuple[subprocess.Popen, typing.Optional[threading.Thread]]: + completion_callback: CompletionCallback | None = None, + completion_trigger: multiprocessing.synchronize.Event | None = None, + environment: dict[str, str] | None = None, + cwd: pathlib.Path | None = None, +) -> tuple[subprocess.Popen, threading.Thread | None]: thread_out = None with open(f"{runner_name}_{proc_id}.err", "w") as err: @@ -58,8 +58,8 @@ def _execute_process( if completion_callback or completion_trigger: def trigger_check( - completion_callback: typing.Optional[CompletionCallback], - trigger_to_set: typing.Optional[multiprocessing.synchronize.Event], + completion_callback: CompletionCallback | None, + trigger_to_set: multiprocessing.synchronize.Event | None, process: subprocess.Popen, ) -> None: while process.poll() is None: @@ -67,12 +67,8 @@ def trigger_check( if trigger_to_set: trigger_to_set.set() if completion_callback: - with 
open(f"{runner_name}_{proc_id}.err") as err: - std_err = err.read() - - with open(f"{runner_name}_{proc_id}.out") as out: - std_out = out.read() - + std_err = pathlib.Path(f"{runner_name}_{proc_id}.err").read_text() + std_out = pathlib.Path(f"{runner_name}_{proc_id}.out").read_text() completion_callback( status_code=process.returncode, std_out=std_out, @@ -109,23 +105,23 @@ def __init__(self, simvue_runner: "simvue.Run", keep_logs: bool = True) -> None: """ self._runner = simvue_runner self._keep_logs = keep_logs - self._completion_callbacks: dict[str, typing.Optional[CompletionCallback]] = {} + self._completion_callbacks: dict[str, CompletionCallback] | None = {} self._completion_triggers: dict[ - str, typing.Optional[multiprocessing.synchronize.Event] + str, multiprocessing.synchronize.Event | None ] = {} - self._completion_processes: dict[str, typing.Optional[threading.Thread]] = {} + self._completion_processes: dict[str, threading.Thread] | None = {} self._alert_ids: dict[str, str] = {} self.command_str: dict[str, str] = {} self._processes: dict[str, subprocess.Popen] = {} - def std_out(self, process_id: str) -> typing.Optional[str]: + def std_out(self, process_id: str) -> str | None: if not os.path.exists(out_file := f"{self._runner.name}_{process_id}.out"): return None with open(out_file) as out: return out.read() or None - def std_err(self, process_id: str) -> typing.Optional[str]: + def std_err(self, process_id: str) -> str | None: if not os.path.exists(err_file := f"{self._runner.name}_{process_id}.err"): return None @@ -136,13 +132,13 @@ def add_process( self, identifier: str, *args, - executable: typing.Optional[str] = None, - script: typing.Optional[pathlib.Path] = None, - input_file: typing.Optional[pathlib.Path] = None, - env: typing.Optional[typing.Dict[str, str]] = None, - cwd: typing.Optional[pathlib.Path] = None, - completion_callback: typing.Optional[CompletionCallback] = None, - completion_trigger: 
typing.Optional[multiprocessing.synchronize.Event] = None, + executable: str | None = None, + script: pathlib.Path | None = None, + input_file: pathlib.Path | None = None, + env: dict[str, str] | None = None, + cwd: pathlib.Path | None = None, + completion_callback: CompletionCallback | None = None, + completion_trigger: multiprocessing.synchronize.Event | None = None, **kwargs, ) -> None: """Add a process to be executed to the executor. @@ -187,9 +183,9 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: input_file : str | None, optional the input file to run, note this only work if the input file is not an option, if this is the case you should provide it as such and perform the upload manually, by default None - env : typing.Dict[str, str], optional + env : dict[str, str], optional environment variables for process - cwd: typing.Optional[pathlib.Path], optional + cwd: pathlib.Path | None, optional working directory to execute the process within completion_callback : typing.Callable | None, optional callback to run when process terminates @@ -213,7 +209,7 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: if input_file: self._runner.save_file(file_path=input_file, category="input") - command: typing.List[str] = [] + command: list[str] = [] if executable: command += [f"{executable}"] @@ -233,15 +229,15 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: arg = arg.replace("_", "-") if len(arg) == 1: - if isinstance(value, bool) and value: - command += [f"-{arg}"] - else: - command += [f"-{arg}", f"{value}"] + command += ( + [f"-{arg}"] + if isinstance(value, bool) and value + else [f"-{arg}", f"{value}"] + ) + elif isinstance(value, bool) and value: + command += [f"--{arg}"] else: - if isinstance(value, bool) and value: - command += [f"--{arg}"] - else: - command += [f"--{arg}", f"{value}"] + command += [f"--{arg}", f"{value}"] command += pos_args @@ -261,8 +257,8 @@ def 
callback_function(status_code: int, std_out: str, std_err: str) -> None: ) ) - self._alert_ids[identifier] = self._runner.create_alert( - name=f"{identifier}_exit_status", source="user" + self._alert_ids[identifier] = self._runner.create_user_alert( + name=f"{identifier}_exit_status" ) if not self._alert_ids[identifier]: @@ -296,16 +292,14 @@ def success(self) -> int: @property def exit_status(self) -> int: """Returns the first non-zero exit status if applicable""" - _non_zero = [ + if _non_zero := [ i.returncode for i in self._processes.values() if i.returncode != 0 - ] - - if _non_zero: + ]: return _non_zero[0] return 0 - def get_error_summary(self) -> dict[str, typing.Optional[str]]: + def get_error_summary(self) -> dict[str, str] | None: """Returns the summary messages of all errors""" return { identifier: self._get_error_status(identifier) @@ -330,8 +324,8 @@ def get_command(self, process_id: str) -> str: raise KeyError(f"Failed to retrieve '{process_id}', no such process") return self.command_str[process_id] - def _get_error_status(self, process_id: str) -> typing.Optional[str]: - err_msg: typing.Optional[str] = None + def _get_error_status(self, process_id: str) -> str | None: + err_msg: str | None = None # Return last 10 lines of stdout if stderr empty if not (err_msg := self.std_err(process_id)) and ( @@ -344,6 +338,9 @@ def _get_error_status(self, process_id: str) -> typing.Optional[str]: def _update_alerts(self) -> None: """Send log events for the result of each process""" + # Wait for the dispatcher to send the latest information before + # allowing the executor to finish (and as such the run instance to exit) + _wait_limit: float = 1 for proc_id, process in self._processes.items(): if process.returncode != 0: # If the process fails then purge the dispatcher event queue @@ -355,9 +352,6 @@ def _update_alerts(self) -> None: else: self._runner.log_alert(self._alert_ids[proc_id], "ok") - # Wait for the dispatcher to send the latest information before - # 
allowing the executor to finish (and as such the run instance to exit) - _wait_limit: float = 1 _current_time: float = 0 while ( self._runner._dispatcher @@ -380,7 +374,7 @@ def _save_output(self) -> None: ) def kill_process( - self, process_id: typing.Union[int, str], kill_children_only: bool = False + self, process_id: int | str, kill_children_only: bool = False ) -> None: """Kill a running process by ID @@ -389,7 +383,7 @@ def kill_process( Parameters ---------- - process_id : typing.Union[int, str] + process_id : int | str either the identifier for a client created process or the PID of an external process kill_children_only : bool, optional diff --git a/simvue/factory/proxy/__init__.py b/simvue/factory/proxy/__init__.py index ce92e57e..dbcf0cba 100644 --- a/simvue/factory/proxy/__init__.py +++ b/simvue/factory/proxy/__init__.py @@ -16,7 +16,7 @@ def Simvue( - name: typing.Optional[str], + name: str | None, uniq_id: str, mode: str, config: "SimvueConfiguration", diff --git a/simvue/factory/proxy/base.py b/simvue/factory/proxy/base.py index 2dc3c13d..6f35e691 100644 --- a/simvue/factory/proxy/base.py +++ b/simvue/factory/proxy/base.py @@ -7,15 +7,15 @@ class SimvueBaseClass(abc.ABC): @abc.abstractmethod def __init__( self, - name: typing.Optional[str], + name: str | None, uniq_id: str, suppress_errors: bool, ) -> None: self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._suppress_errors: bool = suppress_errors self._uuid: str = uniq_id - self._name: typing.Optional[str] = name - self._id: typing.Optional[int] = None + self._name: str | None = name + self._id: int | None = None self._aborted: bool = False def _error(self, message: str) -> None: @@ -24,46 +24,37 @@ def _error(self, message: str) -> None: """ if not self._suppress_errors: raise RuntimeError(message) - else: - self._logger.error(message) - self._aborted = True + self._logger.error(message) + self._aborted = True @abc.abstractmethod - def list_tags(self) -> 
typing.Optional[list[str]]: + def list_tags(self) -> list[str] | None: pass @abc.abstractmethod - def create_run( - self, data: dict[str, typing.Any] - ) -> tuple[typing.Optional[str], typing.Optional[str]]: + def create_run(self, data: dict[str, typing.Any]) -> tuple[str, str | None]: pass @abc.abstractmethod - def update( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def update(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def set_folder_details(self, data) -> typing.Optional[dict[str, typing.Any]]: + def set_folder_details(self, data) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def save_file( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def save_file(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def add_alert( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def add_alert(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod def set_alert_state( self, alert_id: str, status: str - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> dict[str, typing.Any] | None: pass @abc.abstractmethod @@ -71,19 +62,15 @@ def list_alerts(self) -> list[dict[str, typing.Any]]: pass @abc.abstractmethod - def send_metrics( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_metrics(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def send_event( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_event(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: pass @abc.abstractmethod - def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: + def send_heartbeat(self) -> dict[str, typing.Any] | None: pass @abc.abstractmethod diff --git a/simvue/factory/proxy/offline.py 
b/simvue/factory/proxy/offline.py index 9d85329e..650c0d36 100644 --- a/simvue/factory/proxy/offline.py +++ b/simvue/factory/proxy/offline.py @@ -16,9 +16,6 @@ skip_if_failed, ) -if typing.TYPE_CHECKING: - pass - logger = logging.getLogger(__name__) @@ -29,15 +26,15 @@ class Offline(SimvueBaseClass): def __init__( self, - name: typing.Optional[str], + name: str | None, uniq_id: str, config: SimvueConfiguration, suppress_errors: bool = True, ) -> None: super().__init__(name=name, uniq_id=uniq_id, suppress_errors=suppress_errors) - _offline_dir = config.offline.cache - self._directory: str = os.path.join(_offline_dir, self._uuid) + _offline_dir: pathlib.Path = config.offline.cache + self._directory: str = _offline_dir.joinpath(self._uuid) os.makedirs(self._directory, exist_ok=True) @@ -60,7 +57,7 @@ def _write_json(self, filename: str, data: dict[str, typing.Any]) -> None: @skip_if_failed("_aborted", "_suppress_errors", None) def _mock_api_post( self, prefix: str, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> dict[str, typing.Any] | None: unique_id = time.time() filename = os.path.join(self._directory, f"{prefix}-{unique_id}.json") if not data.get("id"): @@ -69,7 +66,7 @@ def _mock_api_post( return data @skip_if_failed("_aborted", "_suppress_errors", (None, None)) - def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[str]]: + def create_run(self, data) -> tuple[str, str | None]: """ Create a run """ @@ -101,10 +98,10 @@ def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[str]]: filename = f"{self._directory}/{status}" create_file(filename) - return (self._name, self._id) + return self._name, self._id @skip_if_failed("_aborted", "_suppress_errors", None) - def update(self, data) -> typing.Optional[dict[str, typing.Any]]: + def update(self, data) -> dict[str, typing.Any] | None: """ Update metadata, tags or status """ @@ -131,7 +128,7 @@ def update(self, data) -> typing.Optional[dict[str, 
typing.Any]]: return data @skip_if_failed("_aborted", "_suppress_errors", None) - def set_folder_details(self, data) -> typing.Optional[dict[str, typing.Any]]: + def set_folder_details(self, data) -> dict[str, typing.Any] | None: """ Set folder details """ @@ -141,9 +138,7 @@ def set_folder_details(self, data) -> typing.Optional[dict[str, typing.Any]]: return data @skip_if_failed("_aborted", "_suppress_errors", None) - def save_file( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def save_file(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Save file """ @@ -157,9 +152,7 @@ def save_file( self._write_json(filename, prepare_for_api(data, False)) return data - def add_alert( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def add_alert(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Add an alert """ @@ -168,7 +161,7 @@ def add_alert( @skip_if_failed("_aborted", "_suppress_errors", None) def set_alert_state( self, alert_id: str, status: str - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> dict[str, typing.Any] | None: if not os.path.exists( _alert_file := os.path.join(self._directory, f"alert-{alert_id}.json") ): @@ -203,24 +196,20 @@ def list_alerts(self) -> list[dict[str, typing.Any]]: for alert_file in glob.glob(os.path.join(self._directory, "alert-*.json")) ] - def send_metrics( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_metrics(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Send metrics """ return self._mock_api_post("metrics", data) - def send_event( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_event(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Send event """ return self._mock_api_post("event", data) @skip_if_failed("_aborted", "_suppress_errors", None) - def send_heartbeat(self) -> 
typing.Optional[dict[str, typing.Any]]: + def send_heartbeat(self) -> dict[str, typing.Any] | None: logger.debug( f"Creating heartbeat file: {os.path.join(self._directory, 'heartbeat')}" ) diff --git a/simvue/factory/proxy/remote.py b/simvue/factory/proxy/remote.py index 4f3077f2..c1860dac 100644 --- a/simvue/factory/proxy/remote.py +++ b/simvue/factory/proxy/remote.py @@ -5,7 +5,7 @@ if typing.TYPE_CHECKING: from simvue.config.user import SimvueConfiguration -from simvue.api import get, post, put +from simvue.api.request import get, post, put from simvue.factory.proxy.base import SimvueBaseClass from simvue.utilities import prepare_for_api, skip_if_failed from simvue.version import __version__ @@ -23,7 +23,7 @@ class Remote(SimvueBaseClass): def __init__( self, - name: typing.Optional[str], + name: str | None, uniq_id: str, config: "SimvueConfiguration", suppress_errors: bool = True, @@ -46,7 +46,7 @@ def list_tags(self) -> list[str]: logger.debug("Retrieving existing tags") try: response = get( - f"{self._user_config.server.url}/api/runs/{self._id}", self._headers + f"{self._user_config.server.url}/runs/{self._id}", self._headers ) except Exception as err: self._error(f"Exception retrieving tags: {str(err)}") @@ -66,13 +66,10 @@ def list_tags(self) -> list[str]: ) return [] - if response.status_code == http.HTTPStatus.OK: - return data - - return [] + return data if response.status_code == http.HTTPStatus.OK else [] @skip_if_failed("_aborted", "_suppress_errors", (None, None)) - def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[int]]: + def create_run(self, data) -> tuple[str, str | None]: """ Create a run """ @@ -80,7 +77,7 @@ def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[int]]: logger.debug("Creating folder %s if necessary", data.get("folder")) try: response = post( - f"{self._user_config.server.url}/api/folders", + f"{self._user_config.server.url}/folders", self._headers, {"path": data.get("folder")}, ) @@ 
-104,9 +101,7 @@ def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[int]]: logger.debug('Creating run with data: "%s"', data) try: - response = post( - f"{self._user_config.server.url}/api/runs", self._headers, data - ) + response = post(f"{self._user_config.server.url}/runs", self._headers, data) except Exception as err: self._error(f"Exception creating run: {str(err)}") return (None, None) @@ -135,7 +130,7 @@ def create_run(self, data) -> tuple[typing.Optional[str], typing.Optional[int]]: @skip_if_failed("_aborted", "_suppress_errors", None) def update( self, data: dict[str, typing.Any], _=None - ) -> typing.Optional[dict[str, typing.Any]]: + ) -> dict[str, typing.Any] | None: """ Update metadata, tags or status """ @@ -145,9 +140,7 @@ def update( logger.debug('Updating run with data: "%s"', data) try: - response = put( - f"{self._user_config.server.url}/api/runs", self._headers, data - ) + response = put(f"{self._user_config.server.url}/runs", self._headers, data) except Exception as err: self._error(f"Exception updating run: {err}") return None @@ -165,9 +158,7 @@ def update( return None @skip_if_failed("_aborted", "_suppress_errors", None) - def set_folder_details( - self, data, run=None - ) -> typing.Optional[dict[str, typing.Any]]: + def set_folder_details(self, data, run=None) -> dict[str, typing.Any] | None: """ Set folder details """ @@ -176,7 +167,7 @@ def set_folder_details( try: response = post( - f"{self._user_config.server.url}/api/folders", self._headers, data + f"{self._user_config.server.url}/folders", self._headers, data ) except Exception as err: self._error(f"Exception creating folder: {err}") @@ -195,7 +186,7 @@ def set_folder_details( try: response = put( - f"{self._user_config.server.url}/api/folders", self._headers, data + f"{self._user_config.server.url}/folders", self._headers, data ) except Exception as err: self._error(f"Exception setting folder details: {err}") @@ -216,9 +207,7 @@ def set_folder_details( return 
None @skip_if_failed("_aborted", "_suppress_errors", False) - def save_file( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def save_file(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Save file """ @@ -227,7 +216,7 @@ def save_file( # Get presigned URL try: response = post( - f"{self._user_config.server.url}/api/artifacts", + f"{self._user_config.server.url}/artifacts", self._headers, prepare_for_api(data), ) @@ -283,11 +272,7 @@ def save_file( ) return None else: - if "pickledFile" in data: - use_filename = data["pickledFile"] - else: - use_filename = data["originalPath"] - + use_filename = data.get("pickledFile", data["originalPath"]) try: with open(use_filename, "rb") as fh: response = put( @@ -311,7 +296,7 @@ def save_file( return None if storage_id: - path = f"{self._user_config.server.url}/api/runs/{self._id}/artifacts" + path = f"{self._user_config.server.url}/runs/{self._id}/artifacts" data["storage"] = storage_id try: @@ -342,7 +327,7 @@ def add_alert(self, data, run=None): try: response = post( - f"{self._user_config.server.url}/api/alerts", self._headers, data + f"{self._user_config.server.url}/alerts", self._headers, data ) except Exception as err: self._error(f"Got exception when creating an alert: {str(err)}") @@ -361,25 +346,20 @@ def add_alert(self, data, run=None): return False @skip_if_failed("_aborted", "_suppress_errors", {}) - def set_alert_state( - self, alert_id, status - ) -> typing.Optional[dict[str, typing.Any]]: + def set_alert_state(self, alert_id, status) -> dict[str, typing.Any] | None: """ Set alert state """ data = {"run": self._id, "alert": alert_id, "status": status} try: response = put( - f"{self._user_config.server.url}/api/alerts/status", self._headers, data + f"{self._user_config.server.url}/alerts/status", self._headers, data ) except Exception as err: self._error(f"Got exception when setting alert state: {err}") return {} - if response.status_code == 
http.HTTPStatus.OK: - return response.json() - - return {} + return response.json() if response.status_code == http.HTTPStatus.OK else {} @skip_if_failed("_aborted", "_suppress_errors", []) def list_alerts(self) -> list[dict[str, typing.Any]]: @@ -387,7 +367,7 @@ def list_alerts(self) -> list[dict[str, typing.Any]]: List alerts """ try: - response = get(f"{self._user_config.server.url}/api/alerts", self._headers) + response = get(f"{self._user_config.server.url}/alerts", self._headers) except Exception as err: self._error(f"Got exception when listing alerts: {str(err)}") return [] @@ -400,15 +380,10 @@ def list_alerts(self) -> list[dict[str, typing.Any]]: ) return [] - if response.status_code == http.HTTPStatus.OK: - return data - - return [] + return data if response.status_code == http.HTTPStatus.OK else [] @skip_if_failed("_aborted", "_suppress_errors", None) - def send_metrics( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_metrics(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Send metrics """ @@ -416,7 +391,7 @@ def send_metrics( try: response = post( - f"{self._user_config.server.url}/api/metrics", + f"{self._user_config.server.url}/metrics", self._headers_mp, data, is_json=False, @@ -434,9 +409,7 @@ def send_metrics( return None @skip_if_failed("_aborted", "_suppress_errors", None) - def send_event( - self, data: dict[str, typing.Any] - ) -> typing.Optional[dict[str, typing.Any]]: + def send_event(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None: """ Send events """ @@ -444,7 +417,7 @@ def send_event( try: response = post( - f"{self._user_config.server.url}/api/events", + f"{self._user_config.server.url}/events", self._headers_mp, data, is_json=False, @@ -462,7 +435,7 @@ def send_event( return None @skip_if_failed("_aborted", "_suppress_errors", None) - def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: + def send_heartbeat(self) -> dict[str, 
typing.Any] | None: """ Send heartbeat """ @@ -470,7 +443,7 @@ def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: try: response = put( - f"{self._user_config.server.url}/api/runs/heartbeat", + f"{self._user_config.server.url}/runs/heartbeat", self._headers, {"id": self._id}, ) @@ -488,11 +461,11 @@ def send_heartbeat(self) -> typing.Optional[dict[str, typing.Any]]: @skip_if_failed("_aborted", "_suppress_errors", False) def get_abort_status(self) -> bool: - logger.debug("Retrieving alert status") + logger.debug("Retrieving abort status") try: response = get( - f"{self._user_config.server.url}/api/runs/{self._id}/abort", + f"{self._user_config.server.url}/runs/{self._id}/abort", self._headers_mp, ) except Exception as err: diff --git a/simvue/metadata.py b/simvue/metadata.py index 7d6a6b07..abba595f 100644 --- a/simvue/metadata.py +++ b/simvue/metadata.py @@ -8,14 +8,16 @@ import contextlib import typing -import re import json import toml -import importlib.metadata +import logging import pathlib +import flatdict from simvue.utilities import simvue_timestamp +logger = logging.getLogger(__file__) + def git_info(repository: str) -> dict[str, typing.Any]: """Retrieves metadata for the target git repository @@ -42,14 +44,12 @@ def git_info(repository: str) -> dict[str, typing.Any]: try: git_repo = git.Repo(repository, search_parent_directories=True) current_commit: git.Commit = git_repo.head.commit - author_list: set[str] = set( + author_list: set[str] = { email for commit in git_repo.iter_commits("--all") if "noreply" not in (email := (commit.author.email or "")) and "[bot]" not in (commit.author.name or "") - ) - - ref: str = current_commit.hexsha + } # In the case where the repository is dirty blame should point to the # current developer, not the person responsible for the latest commit @@ -58,71 +58,65 @@ def git_info(repository: str) -> dict[str, typing.Any]: else: blame = current_commit.committer.email - for tag in git_repo.tags: - if tag.commit == 
current_commit: - ref = tag.name - break - + ref: str = next( + (tag.name for tag in git_repo.tags if tag.commit == current_commit), + current_commit.hexsha, + ) return { - "git.authors": json.dumps(list(author_list)), - "git.ref": ref, - "git.msg": current_commit.message.strip(), - "git.time_stamp": simvue_timestamp(current_commit.committed_datetime), - "git.blame": blame, - "git.url": git_repo.remote().url, - "git.dirty": dirty, + "git": { + "authors": json.dumps(list(author_list)), + "ref": ref, + "msg": current_commit.message.strip(), + "time_stamp": simvue_timestamp(current_commit.committed_datetime), + "blame": blame, + "url": git_repo.remote().url, + "dirty": dirty, + } } except (git.InvalidGitRepositoryError, ValueError): return {} def _python_env(repository: pathlib.Path) -> dict[str, typing.Any]: - """Retrieve a dictionary of Python dependencies if a file is available""" - meta: dict[str, str] = {} - req_meta: dict[str, str] = {} - - if (reqfile := pathlib.Path(repository).joinpath("requirements.txt")).exists(): - with reqfile.open() as in_req: - requirement_lines = in_req.readlines() - req_meta = {} - - for line in requirement_lines: - dependency, version = line.split("=", 1) - req_meta[dependency] = version - if (pptoml := pathlib.Path(repository).joinpath("pyproject.toml")).exists(): - content = toml.load(pptoml) - - requirements = (project := content.get("project", {})).get("dependencies") - - if requirements: - requirements = [re.split("[=><]", dep, 1)[0] for dep in requirements] - - requirements = requirements or ( - project := content.get("tool", {}).get("poetry", {}) - ).get("dependencies") - - if version := project.get("version"): - meta |= {"python.project.version": version} - - if name := project.get("name"): - meta |= {"python.project.name": name} - - if not requirements: - return meta - - req_meta = {} + """Retrieve a dictionary of Python dependencies if lock file is available""" + python_meta: dict[str, str] = {} + + if (pyproject_file := 
pathlib.Path(repository).joinpath("pyproject.toml")).exists(): + content = toml.load(pyproject_file) + if (poetry_content := content.get("tool", {}).get("poetry", {})).get("name"): + python_meta |= { + "python.project.name": poetry_content["name"], + "python.project.version": poetry_content["version"], + } + elif other_content := content.get("project"): + python_meta |= { + "python.project.name": other_content["name"], + "python.project.version": other_content["version"], + } + + if (poetry_lock_file := pathlib.Path(repository).joinpath("poetry.lock")).exists(): + content = toml.load(poetry_lock_file).get("package", {}) + python_meta |= { + f"python.environment.{package['name']}": package["version"] + for package in content + } + elif (uv_lock_file := pathlib.Path(repository).joinpath("uv.lock")).exists(): + content = toml.load(uv_lock_file).get("package", {}) + python_meta |= { + f"python.environment.{package['name']}": package["version"] + for package in content + } + else: + with contextlib.suppress((KeyError, ImportError)): + from pip._internal.operations.freeze import freeze - for package in requirements: - if package == "python": - continue - # Cover case where package is an optional dependency and not installed - with contextlib.suppress(importlib.metadata.PackageNotFoundError): - req_meta[package] = importlib.metadata.version(package) + python_meta |= { + f"python.environment.{entry[0]}": entry[-1] + for line in freeze(local_only=True) + if (entry := line.split("==")) + } - return meta | { - f"python.environment.{dependency}": version - for dependency, version in req_meta.items() - } + return python_meta def _rust_env(repository: pathlib.Path) -> dict[str, typing.Any]: @@ -148,6 +142,62 @@ def _rust_env(repository: pathlib.Path) -> dict[str, typing.Any]: } +def _julia_env(repository: pathlib.Path) -> dict[str, typing.Any]: + """Retrieve a dictionary of Julia dependencies if a project file is available""" + julia_meta: dict[str, str] = {} + if (project_file 
:= pathlib.Path(repository).joinpath("Project.toml")).exists(): + content = toml.load(project_file) + julia_meta |= { + f"julia.project.{key}": value + for key, value in content.items() + if not isinstance(value, dict) + } + julia_meta |= { + f"julia.environment.{key}": value + for key, value in content.get("compat", {}).items() + } + return julia_meta + + +def _node_js_env(repository: pathlib.Path) -> dict[str, typing.Any]: + js_meta: dict[str, str] = {} + if ( + project_file := pathlib.Path(repository).joinpath("package-lock.json") + ).exists(): + content = json.load(project_file.open()) + if (lfv := content["lockfileVersion"]) not in (1, 2, 3): + logger.warning( + f"Unsupported package-lock.json lockfileVersion {lfv}, ignoring JS project metadata" + ) + return {} + + js_meta |= { + f"javascript.project.{key}": value + for key, value in content.items() + if key in ("name", "version") + } + js_meta |= { + f"javascript.environment.{key.replace('@', '')}": value["version"] + for key, value in content.get( + "packages" if lfv in (2, 3) else "dependencies", {} + ).items() + if key and not value.get("dev", True) + } + return js_meta + + def environment(repository: pathlib.Path = pathlib.Path.cwd()) -> dict[str, typing.Any]: """Retrieve environment metadata""" - return _python_env(repository) | _rust_env(repository) + _environment_meta = flatdict.FlatDict( + _python_env(repository), delimiter="." + ).as_dict() + _environment_meta |= flatdict.FlatDict( + _rust_env(repository), delimiter="." + ).as_dict() + _environment_meta |= flatdict.FlatDict( + _julia_env(repository), delimiter="." + ).as_dict() + _environment_meta |= flatdict.FlatDict( + _node_js_env(repository), delimiter="." 
+ ).as_dict() + return _environment_meta diff --git a/simvue/metrics.py b/simvue/metrics.py index 4417e39d..fc345f56 100644 --- a/simvue/metrics.py +++ b/simvue/metrics.py @@ -1,5 +1,14 @@ +""" +CPU/GPU Metrics +=============== + +Get information relating to the usage of the CPU and GPU (where applicable) + +""" + import contextlib import logging +import psutil from .pynvml import ( nvmlDeviceGetComputeRunningProcesses, @@ -15,11 +24,11 @@ logger = logging.getLogger(__name__) -def get_process_memory(processes): +def get_process_memory(processes: list[psutil.Process]) -> int: """ Get the resident set size """ - rss = 0 + rss: int = 0 for process in processes: with contextlib.suppress(Exception): rss += process.memory_info().rss / 1024 / 1024 @@ -27,11 +36,11 @@ def get_process_memory(processes): return rss -def get_process_cpu(processes): +def get_process_cpu(processes: list[psutil.Process]) -> int: """ Get the CPU usage """ - cpu_percent = 0 + cpu_percent: int = 0 for process in processes: with contextlib.suppress(Exception): cpu_percent += process.cpu_percent() @@ -39,27 +48,24 @@ def get_process_cpu(processes): return cpu_percent -def is_gpu_used(handle, processes): +def is_gpu_used(handle, processes: list[psutil.Process]) -> bool: """ Check if the GPU is being used by the list of processes """ pids = [process.pid for process in processes] - gpu_pids = [] - for process in nvmlDeviceGetComputeRunningProcesses(handle): - gpu_pids.append(process.pid) - - for process in nvmlDeviceGetGraphicsRunningProcesses(handle): - gpu_pids.append(process.pid) - + gpu_pids = [process.pid for process in nvmlDeviceGetComputeRunningProcesses(handle)] + gpu_pids.extend( + process.pid for process in nvmlDeviceGetGraphicsRunningProcesses(handle) + ) return len(list(set(gpu_pids) & set(pids))) > 0 -def get_gpu_metrics(processes): +def get_gpu_metrics(processes: list[psutil.Process]) -> dict[str, float]: """ Get GPU metrics """ - gpu_metrics = {} + gpu_metrics: dict[str, float] = {} with 
contextlib.suppress(Exception): nvmlInit() diff --git a/simvue/models.py b/simvue/models.py index 235acd04..79aa2751 100644 --- a/simvue/models.py +++ b/simvue/models.py @@ -1,21 +1,62 @@ -from typing import Annotated, Dict, List, Optional, Union -from pydantic import BaseModel, Field, StringConstraints, PositiveInt +import datetime +import typing +import pydantic + FOLDER_REGEX: str = r"^/.*" NAME_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:]+$" METRIC_KEY_REGEX: str = r"^[a-zA-Z0-9\-\_\s\/\.:=><]+$" +DATETIME_FORMAT: str = "%Y-%m-%dT%H:%M:%S.%f" -MetadataKeyString = Annotated[str, StringConstraints(pattern=r"^[\w\-\s\.]+$")] -TagString = Annotated[str, StringConstraints(pattern=r"^[\w\-\s\.]+$")] -MetricKeyString = Annotated[str, StringConstraints(pattern=METRIC_KEY_REGEX)] +MetadataKeyString = typing.Annotated[ + str, pydantic.StringConstraints(pattern=r"^[\w\-\s\.]+$") +] +TagString = typing.Annotated[str, pydantic.StringConstraints(pattern=r"^[\w\-\s\.]+$")] +MetricKeyString = typing.Annotated[ + str, pydantic.StringConstraints(pattern=METRIC_KEY_REGEX) +] # Pydantic class to validate run.init() -class RunInput(BaseModel): - name: Optional[str] = Field(None, pattern=NAME_REGEX) - metadata: Optional[Dict[MetadataKeyString, Union[str, int, float, None]]] = None - tags: Optional[List[TagString]] = None - description: Optional[str] = None - folder: str = Field(pattern=FOLDER_REGEX) - status: Optional[str] = None - ttl: Optional[PositiveInt] = None +class RunInput(pydantic.BaseModel): + name: str | None = pydantic.Field(None, pattern=NAME_REGEX) + metadata: dict[MetadataKeyString, str | int | float | None] | None = None + tags: list[TagString] | None = None + description: str | None = None + folder: str = pydantic.Field(pattern=FOLDER_REGEX) + status: str | None = None + ttl: pydantic.PositiveInt | None = None + + +class MetricSet(pydantic.BaseModel): + time: pydantic.NonNegativeFloat | pydantic.NonNegativeInt + timestamp: str + step: pydantic.NonNegativeInt + values: 
dict[str, int | float | bool] + + @pydantic.field_validator("timestamp", mode="after") + @classmethod + def timestamp_str(cls, value: str) -> str: + try: + datetime.datetime.strptime(value, DATETIME_FORMAT) + except ValueError as e: + raise AssertionError( + f"Invalid timestamp, expected form '{DATETIME_FORMAT}'" + ) from e + return value + + +class EventSet(pydantic.BaseModel): + message: str + timestamp: str + + @pydantic.field_validator("timestamp", mode="after") + @classmethod + def timestamp_str(cls, value: str) -> str: + try: + datetime.datetime.strptime(value, DATETIME_FORMAT) + except ValueError as e: + raise AssertionError( + f"Invalid timestamp, expected form '{DATETIME_FORMAT}'" + ) from e + return value diff --git a/simvue/run.py b/simvue/run.py index 03aaf706..af7a5aa0 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -7,7 +7,6 @@ """ import contextlib -import json import logging import pathlib import mimetypes @@ -28,38 +27,48 @@ import uuid import click -import msgpack import psutil -from pydantic import ValidationError + +from simvue.api.objects.alert.base import AlertBase +from simvue.api.objects.alert.fetch import Alert +from simvue.api.objects.folder import Folder, get_folder_from_path +from simvue.exception import ObjectNotFoundError, SimvueRunError +from simvue.utilities import prettify_pydantic + from .config.user import SimvueConfiguration -import simvue.api as sv_api from .factory.dispatch import Dispatcher from .executor import Executor -from .factory.proxy import Simvue from .metrics import get_gpu_metrics, get_process_cpu, get_process_memory -from .models import RunInput, FOLDER_REGEX, NAME_REGEX, MetricKeyString -from .serialization import serialize_object +from .models import FOLDER_REGEX, NAME_REGEX, MetricKeyString from .system import get_system from .metadata import git_info, environment from .eco import SimvueEmissionsTracker from .utilities import ( - calculate_sha256, - compare_alerts, skip_if_failed, validate_timestamp, 
simvue_timestamp, ) +from .api.objects import ( + Run as RunObject, + FileArtifact, + ObjectArtifact, + MetricsThresholdAlert, + MetricsRangeAlert, + UserAlert, + EventsAlert, + Events, + Metrics, +) try: from typing import Self except ImportError: - from typing_extensions import Self + from typing_extensions import Self # noqa: F401 if typing.TYPE_CHECKING: - from .factory.proxy import SimvueBaseClass from .factory.dispatch import DispatcherBaseClass UPLOAD_TIMEOUT: int = 30 @@ -74,6 +83,9 @@ def check_run_initialised( ) -> typing.Callable[..., typing.Any]: @functools.wraps(function) def _wrapper(self: Self, *args: typing.Any, **kwargs: typing.Any) -> typing.Any: + # Tidy pydantic errors + _function = prettify_pydantic(function) + if self._user_config.run.mode == "disabled": return True @@ -81,11 +93,11 @@ def _wrapper(self: Self, *args: typing.Any, **kwargs: typing.Any) -> typing.Any: self._active = False raise RuntimeError("Cannot update expired Simvue Run") - if not self._simvue: + if not self._sv_obj: raise RuntimeError( f"Simvue Run must be initialised before calling '{function.__name__}'" ) - return function(self, *args, **kwargs) + return _function(self, *args, **kwargs) return _wrapper @@ -102,9 +114,9 @@ class Run: def __init__( self, mode: typing.Literal["online", "offline", "disabled"] = "online", - abort_callback: typing.Optional[typing.Callable[[Self], None]] = None, - server_token: typing.Optional[str] = None, - server_url: typing.Optional[str] = None, + abort_callback: typing.Callable[[Self], None] | None = None, + server_token: pydantic.SecretStr | None = None, + server_url: str | None = None, debug: bool = False, ) -> None: """Initialise a new Simvue run @@ -128,32 +140,31 @@ def __init__( run in debug mode, default is False """ self._uuid: str = f"{uuid.uuid4()}" - self._name: typing.Optional[str] = None + self._name: str | None = None # monitor duration with respect to retention period self._timer: float = 0 - self._retention: 
typing.Optional[float] = None + self._retention: float | None = None self._testing: bool = False self._abort_on_alert: typing.Literal["run", "terminate", "ignore"] = "terminate" - self._abort_callback: typing.Optional[typing.Callable[[Self], None]] = ( - abort_callback - ) + self._abort_callback: typing.Callable[[Self], None] | None = abort_callback self._dispatch_mode: typing.Literal["direct", "queued"] = "queued" self._executor = Executor(self) - self._dispatcher: typing.Optional[DispatcherBaseClass] = None - self._emissions_tracker: typing.Optional[SimvueEmissionsTracker] = None + self._dispatcher: DispatcherBaseClass | None = None - self._id: typing.Optional[str] = None + self._id: str | None = None + self._folder: Folder | None = None self._term_color: bool = True self._suppress_errors: bool = False self._queue_blocking: bool = False - self._status: typing.Optional[ + self._status: ( typing.Literal[ "created", "running", "completed", "failed", "terminated", "lost" ] - ] = None + | None + ) = None self._data: dict[str, typing.Any] = {} self._step: int = 0 self._active: bool = False @@ -169,35 +180,47 @@ def __init__( ) self._aborted: bool = False - self._resources_metrics_interval: typing.Optional[int] = HEARTBEAT_INTERVAL + self._resources_metrics_interval: int | None = ( + HEARTBEAT_INTERVAL + if self._user_config.metrics.resources_metrics_interval < 1 + else self._user_config.metrics.resources_metrics_interval + ) self._headers: dict[str, str] = { - "Authorization": f"Bearer {self._user_config.server.token}" + "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}" } - self._simvue: typing.Optional[SimvueBaseClass] = None - self._pid: typing.Optional[int] = 0 - self._shutdown_event: typing.Optional[threading.Event] = None + self._sv_obj: RunObject | None = None + self._pid: int | None = 0 + self._shutdown_event: threading.Event | None = None self._configuration_lock = threading.Lock() - self._heartbeat_termination_trigger: 
typing.Optional[threading.Event] = None - self._storage_id: typing.Optional[str] = None - self._heartbeat_thread: typing.Optional[threading.Thread] = None + self._heartbeat_termination_trigger: threading.Event | None = None + self._storage_id: str | None = None + self._heartbeat_thread: threading.Thread | None = None self._heartbeat_interval: int = HEARTBEAT_INTERVAL - self._emission_metrics_interval: int = HEARTBEAT_INTERVAL + self._emission_metrics_interval: int | None = ( + HEARTBEAT_INTERVAL + if ( + (_interval := self._user_config.metrics.emission_metrics_interval) + and _interval < 1 + ) + else self._user_config.metrics.emission_metrics_interval + ) + self._emissions_tracker: SimvueEmissionsTracker | None = ( + SimvueEmissionsTracker("simvue", self, self._emission_metrics_interval) + if self._user_config.metrics.enable_emission_metrics + else None + ) def __enter__(self) -> Self: return self def _handle_exception_throw( self, - exc_type: typing.Optional[typing.Type[BaseException]], + exc_type: typing.Type[BaseException] | None, value: BaseException, - traceback: typing.Optional[ - typing.Union[typing.Type[BaseException], BaseException] - ], + traceback: typing.Type[BaseException] | BaseException | None, ) -> None: - _exception_thrown: typing.Optional[str] = ( - exc_type.__name__ if exc_type else None - ) + _exception_thrown: str | None = exc_type.__name__ if exc_type else None _is_running: bool = self._status == "running" _is_running_online: bool = self._id is not None and _is_running _is_running_offline: bool = ( @@ -242,11 +265,9 @@ def _handle_exception_throw( def __exit__( self, - exc_type: typing.Optional[typing.Type[BaseException]], + exc_type: typing.Type[BaseException] | None, value: BaseException, - traceback: typing.Optional[ - typing.Union[typing.Type[BaseException], BaseException] - ], + traceback: typing.Type[BaseException] | BaseException | None, ) -> None: logger.debug( "Automatically closing run '%s' in status %s", @@ -358,7 +379,7 @@ def 
_heartbeat( # Check if the user has aborted the run with self._configuration_lock: - if self._simvue and self._simvue.get_abort_status(): + if self._sv_obj and self._sv_obj.abort_trigger: self._alert_raised_trigger.set() logger.debug("Received abort request from server") @@ -381,8 +402,8 @@ def _heartbeat( if self._abort_on_alert == "terminate": os._exit(1) - if self._simvue: - self._simvue.send_heartbeat() + if self._sv_obj: + self._sv_obj.send_heartbeat() return _heartbeat @@ -398,67 +419,30 @@ def _create_dispatch_callback( if self._user_config.run.mode == "online" and not self._id: raise RuntimeError("Expected identifier for run") - if not self._user_config.server.url: + if not self._user_config.server.url or not self._sv_obj: raise RuntimeError("Cannot commence dispatch, run not initialised") - def _offline_dispatch_callback( + def _dispatch_callback( buffer: list[typing.Any], - category: str, - run_id: typing.Optional[str] = self._id, - uuid: str = self._uuid, + category: typing.Literal["events", "metrics"], + run_obj: RunObject = self._sv_obj, ) -> None: - _offline_directory = self._user_config.offline.cache - if not os.path.exists(_offline_directory): - logger.error( - f"Cannot write to offline directory '{_offline_directory}', directory not found." 
+ if category == "events": + _events = Events.new( + run=self.id, + offline=self._user_config.run.mode == "offline", + events=buffer, ) - return - _directory = os.path.join(_offline_directory, uuid) - - unique_id = time.time() - filename = f"{_directory}/{category}-{unique_id}" - _data = {category: buffer, "run": run_id} - try: - with open(filename, "w") as fh: - json.dump(_data, fh) - except Exception as err: - if self._suppress_errors: - logger.error( - "Got exception writing offline update for %s: %s", - category, - str(err), - ) - else: - raise err - - def _online_dispatch_callback( - buffer: list[typing.Any], - category: str, - url: str = self._user_config.server.url, - run_id: typing.Optional[str] = self._id, - headers: dict[str, str] = self._headers, - ) -> None: - if not buffer: - return - _data = {category: buffer, "run": run_id} - _data_bin = msgpack.packb(_data, use_bin_type=True) - _url: str = f"{url}/api/{category}" - - _msgpack_header = headers | {"Content-Type": "application/msgpack"} - - try: - sv_api.post( - url=_url, headers=_msgpack_header, data=_data_bin, is_json=False + _events.commit() + else: + _metrics = Metrics.new( + run=self.id, + offline=self._user_config.run.mode == "offline", + metrics=buffer, ) - except (ValueError, RuntimeError) as e: - self._error(f"{e}", join_threads=False) - return - - return ( - _online_dispatch_callback - if self._user_config.run.mode == "online" - else _offline_dispatch_callback - ) + _metrics.commit() + + return _dispatch_callback def _start(self, reconnect: bool = False) -> bool: """Start a run @@ -481,13 +465,9 @@ def _start(self, reconnect: bool = False) -> bool: logger.debug("Starting run") - data: dict[str, typing.Any] = {"status": self._status} - - if reconnect: - data["system"] = get_system() - - if self._simvue and not self._simvue.update(data): - return False + if self._sv_obj: + self._sv_obj.status = self._status + self._sv_obj.commit() self._start_time = time.time() @@ -560,14 +540,16 @@ def 
_error(self, message: str, join_threads: bool = True) -> None: self._dispatcher.join() if not self._suppress_errors: - raise RuntimeError(message) - else: - # Simvue support now terminated as the instance of Run has entered - # the dormant state due to exception throw so set listing to be 'lost' - if self._status == "running" and self._simvue: - self._simvue.update({"name": self._name, "status": "lost"}) + raise SimvueRunError(message) - logger.error(message) + # Simvue support now terminated as the instance of Run has entered + # the dormant state due to exception throw so set listing to be 'lost' + if self._status == "running" and self._sv_obj: + self._sv_obj.name = self._name + self._sv_obj.status = "lost" + self._sv_obj.commit() + + logger.error(message) self._aborted = True @@ -575,24 +557,18 @@ def _error(self, message: str, join_threads: bool = True) -> None: @pydantic.validate_call def init( self, - name: typing.Optional[ - typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] - ] = None, + name: typing.Annotated[str | None, pydantic.Field(pattern=NAME_REGEX)] = None, *, - metadata: typing.Optional[ - dict[str, typing.Union[str, int, float, bool]] - ] = None, - tags: typing.Optional[list[str]] = None, - description: typing.Optional[str] = None, + metadata: dict[str, typing.Any] = None, + tags: list[str] | None = None, + description: str | None = None, folder: typing.Annotated[ str, pydantic.Field(None, pattern=FOLDER_REGEX) ] = None, running: bool = True, - retention_period: typing.Optional[str] = None, - timeout: typing.Optional[int] = 180, - visibility: typing.Union[ - typing.Literal["public", "tenant"], list[str], None - ] = None, + retention_period: str | None = None, + timeout: int | None = 180, + visibility: typing.Literal["public", "tenant"] | list[str] | None = None, no_color: bool = False, ) -> bool: """Initialise a Simvue run @@ -645,6 +621,14 @@ def init( self._term_color = not no_color + try: + self._folder = 
get_folder_from_path(path=folder) + except ObjectNotFoundError: + self._folder = Folder.new( + path=folder, offline=self._user_config.run.mode == "offline" + ) + self._folder.commit() # type: ignore + if isinstance(visibility, str) and visibility not in ("public", "tenant"): self._error( "invalid visibility option, must be either None, 'public', 'tenant' or a list of users" @@ -671,7 +655,7 @@ def init( # Parse the time to live/retention time if specified try: if retention_period: - self._retention: typing.Optional[int] = int( + self._retention: int | None = int( humanfriendly.parse_timespan(retention_period) ) else: @@ -682,40 +666,37 @@ def init( self._timer = time.time() - data: dict[str, typing.Any] = { - "metadata": (metadata or {}) | git_info(os.getcwd()) | environment(), - "tags": tags or [], - "status": self._status, - "ttl": self._retention, - "folder": folder, - "name": name, - "description": description, - "system": get_system() if self._status == "running" else None, - "visibility": { - "users": [] if not isinstance(visibility, list) else visibility, - "tenant": visibility == "tenant", - "public": visibility == "public", - }, - "heartbeat_timeout": timeout, + self._sv_obj = RunObject.new( + folder=folder, offline=self._user_config.run.mode == "offline" + ) + + if description: + self._sv_obj.description = description + + if name: + self._sv_obj.name = name + + self._sv_obj.visibility = { + "users": visibility if isinstance(visibility, list) else [], + "tenant": visibility == "tenant", + "public": visibility == "public", } + self._sv_obj.ttl = self._retention + self._sv_obj.status = self._status + self._sv_obj.tags = tags + self._sv_obj.metadata = (metadata or {}) | git_info(os.getcwd()) | environment() + self._sv_obj.heartbeat_timeout = timeout - # Check against the expected run input - try: - RunInput(**data) - except ValidationError as err: - self._error(f"{err}") - return False + if self._status == "running": + self._sv_obj.system = get_system() - 
self._simvue = Simvue( - name=self._name, - uniq_id=self._uuid, - mode=self._user_config.run.mode, - config=self._user_config, - suppress_errors=self._suppress_errors, - ) - name, self._id = self._simvue.create_run(data) + self._data = self._sv_obj._staging + self._sv_obj.commit() - self._data = data + if self._user_config.run.mode == "online": + name = self._sv_obj.name + + self._id = self._sv_obj.id if not name: return False @@ -738,7 +719,7 @@ def init( fg="green" if self._term_color else None, ) - if self._emissions_tracker: + if self._emissions_tracker and self._status == "running": self._emissions_tracker.post_init() self._emissions_tracker.start() @@ -750,15 +731,15 @@ def add_process( self, identifier: str, *cmd_args, - executable: typing.Optional[typing.Union[str, pathlib.Path]] = None, - script: typing.Optional[pydantic.FilePath] = None, - input_file: typing.Optional[pydantic.FilePath] = None, + executable: str | pathlib.Path | None = None, + script: pydantic.FilePath | None = None, + input_file: pydantic.FilePath | None = None, completion_callback: typing.Optional[ typing.Callable[[int, str, str], None] ] = None, - completion_trigger: typing.Optional[multiprocessing.synchronize.Event] = None, - env: typing.Optional[typing.Dict[str, str]] = None, - cwd: typing.Optional[pathlib.Path] = None, + completion_trigger: multiprocessing.synchronize.Event | None = None, + env: dict[str, str] | None = None, + cwd: pathlib.Path | None = None, **cmd_kwargs, ) -> None: """Add a process to be executed to the executor. 
@@ -812,9 +793,9 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: callback to run when process terminates (not supported on Windows) completion_trigger : multiprocessing.Event | None, optional this trigger event is set when the processes completes - env : typing.Dict[str, str], optional + env : dict[str, str], optional environment variables for process - cwd: typing.Optional[pathlib.Path], optional + cwd: pathlib.Path | None, optional working directory to execute the process within. Note that executable, input and script file paths should be absolute or relative to the directory where this method is called, not relative to the new working directory. **kwargs : Any, ..., optional @@ -826,15 +807,12 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: "due to function pickling restrictions for multiprocessing" ) - if isinstance(executable, pathlib.Path): - if not executable.is_file(): - raise FileNotFoundError( - f"Executable '{executable}' is not a valid file" - ) + if isinstance(executable, pathlib.Path) and not executable.is_file(): + raise FileNotFoundError(f"Executable '{executable}' is not a valid file") - cmd_list: typing.List[str] = [] + cmd_list: list[str] = [] pos_args = list(cmd_args) - executable_str: typing.Optional[str] = None + executable_str: str | None = None # Assemble the command for saving to metadata as string if executable: @@ -851,13 +829,13 @@ def callback_function(status_code: int, std_out: str, std_err: str) -> None: if isinstance(val, bool) and val: cmd_list += [f"-{kwarg}"] else: - cmd_list += [f"-{kwarg}{(' ' + _quoted_val) if val else ''}"] + cmd_list += [f"-{kwarg}{(f' {_quoted_val}') if val else ''}"] else: kwarg = kwarg.replace("_", "-") if isinstance(val, bool) and val: cmd_list += [f"--{kwarg}"] else: - cmd_list += [f"--{kwarg}{(' ' + _quoted_val) if val else ''}"] + cmd_list += [f"--{kwarg}{(f' {_quoted_val}') if val else ''}"] cmd_list += pos_args cmd_str = " 
".join(cmd_list) @@ -908,7 +886,7 @@ def executor(self) -> Executor: return self._executor @property - def name(self) -> typing.Optional[str]: + def name(self) -> str | None: """Return the name of the run""" return self._name @@ -918,7 +896,7 @@ def uid(self) -> str: return self._uuid @property - def id(self) -> typing.Optional[str]: + def id(self) -> str | None: """Return the unique id of the run""" return self._id @@ -940,13 +918,7 @@ def reconnect(self, run_id: str) -> bool: self._status = "running" self._id = run_id - self._simvue = Simvue( - self._name, - self._id, - self._user_config.run.mode, - self._user_config, - self._suppress_errors, - ) + self._sv_obj = RunObject(identifier=self._id) self._start(reconnect=True) return True @@ -968,16 +940,14 @@ def set_pid(self, pid: int) -> None: def config( self, *, - suppress_errors: typing.Optional[bool] = None, - queue_blocking: typing.Optional[bool] = None, - resources_metrics_interval: typing.Optional[pydantic.PositiveInt] = None, - emission_metrics_interval: typing.Optional[pydantic.PositiveInt] = None, - enable_emission_metrics: typing.Optional[bool] = None, - disable_resources_metrics: typing.Optional[bool] = None, - storage_id: typing.Optional[str] = None, - abort_on_alert: typing.Optional[ - typing.Union[typing.Literal["run", "all", "ignore"], bool] - ] = None, + suppress_errors: bool | None = None, + queue_blocking: bool | None = None, + resources_metrics_interval: pydantic.PositiveInt | None = None, + emission_metrics_interval: pydantic.PositiveInt | None = None, + enable_emission_metrics: bool | None = None, + disable_resources_metrics: bool | None = None, + storage_id: str | None = None, + abort_on_alert: typing.Literal["run", "all", "ignore"] | bool | None = None, ) -> bool: """Optional configuration @@ -1038,6 +1008,13 @@ def config( "simvue", self, self._emission_metrics_interval ) + # If the main Run API object is initialised the run is active + # hence the tracker should start too + if self._sv_obj: 
+ self._emissions_tracker.start() + elif enable_emission_metrics is False and self._emissions_tracker: + self._error("Cannot disable emissions tracker once it has been started") + if resources_metrics_interval: self._resources_metrics_interval = resources_metrics_interval @@ -1071,7 +1048,7 @@ def update_metadata(self, metadata: dict[str, typing.Any]) -> bool: bool if the update was successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot update metadata, run not initialised") return False @@ -1079,12 +1056,12 @@ def update_metadata(self, metadata: dict[str, typing.Any]) -> bool: self._error("metadata must be a dict") return False - data: dict[str, dict[str, typing.Any]] = {"metadata": metadata} - - if self._simvue and self._simvue.update(data): + if self._sv_obj: + self._sv_obj.metadata = metadata + self._sv_obj.commit() return True - return False + return True @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @@ -1102,16 +1079,14 @@ def set_tags(self, tags: list[str]) -> bool: bool whether the update was successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot update tags, run not initialised") return False - data: dict[str, list[str]] = {"tags": tags} - - if self._simvue and self._simvue.update(data): - return True + self._sv_obj.tags = tags + self._sv_obj.commit() - return False + return True @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @@ -1129,11 +1104,11 @@ def update_tags(self, tags: list[str]) -> bool: bool whether the update was successful """ - if not self._simvue: + if not self._sv_obj: return False try: - current_tags: list[str] = self._simvue.list_tags() or [] + current_tags: list[str] = self._sv_obj.tags except RuntimeError as e: self._error(f"{e.args[0]}") return False @@ -1149,7 +1124,7 @@ def update_tags(self, tags: list[str]) -> bool: @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @pydantic.validate_call - 
def log_event(self, message: str, timestamp: typing.Optional[str] = None) -> bool: + def log_event(self, message: str, timestamp: str | None = None) -> bool: """Log event to the server Parameters @@ -1167,7 +1142,7 @@ def log_event(self, message: str, timestamp: typing.Optional[str] = None) -> boo if self._aborted: return False - if not self._simvue or not self._dispatcher: + if not self._sv_obj or not self._dispatcher: self._error("Cannot log events, run not initialised") return False @@ -1190,10 +1165,10 @@ def log_event(self, message: str, timestamp: typing.Optional[str] = None) -> boo def _add_metrics_to_dispatch( self, - metrics: dict[str, typing.Union[int, float]], - step: typing.Optional[int] = None, - time: typing.Optional[float] = None, - timestamp: typing.Optional[str] = None, + metrics: dict[str, int | float], + step: int | None = None, + time: float | None = None, + timestamp: str | None = None, join_on_fail: bool = True, ) -> bool: if self._user_config.run.mode == "disabled": @@ -1203,7 +1178,7 @@ def _add_metrics_to_dispatch( if not metrics: return True - if not self._simvue or not self._dispatcher: + if not self._sv_obj or not self._dispatcher: self._error("Cannot log metrics, run not initialised", join_on_fail) return False @@ -1236,16 +1211,16 @@ def _add_metrics_to_dispatch( @pydantic.validate_call def log_metrics( self, - metrics: dict[MetricKeyString, typing.Union[int, float]], - step: typing.Optional[int] = None, - time: typing.Optional[float] = None, - timestamp: typing.Optional[str] = None, + metrics: dict[MetricKeyString, int | float], + step: int | None = None, + time: float | None = None, + timestamp: str | None = None, ) -> bool: """Log metrics to Simvue server Parameters ---------- - metrics : dict[str, typing.Union[int, float]] + metrics : dict[str, int | float] set of metrics to upload to server for this run step : int, optional manually specify the step index for this log, by default None @@ -1276,6 +1251,7 @@ def save_object( 
typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] ] = None, allow_pickle: bool = False, + metadata: dict[str, typing.Any] = None, ) -> bool: """Save an object to the Simvue server @@ -1289,43 +1265,35 @@ def save_object( name to associate with this object, by default None allow_pickle : bool, optional whether to allow pickling if all other serialization types fail, by default False + metadata : str | None, optional + any metadata to attach to the artifact Returns ------- bool whether object upload was successful """ - serialized = serialize_object(obj, allow_pickle) - - if not serialized or not (pickled := serialized[0]): - self._error(f"Failed to serialize '{obj}'") - return False - - data_type = serialized[1] - - if not data_type and not allow_pickle: - self._error("Unable to save Python object, set allow_pickle to True") + if not self._sv_obj or not self.id: + self._error("Cannot save files, run not initialised") return False - data: dict[str, typing.Any] = { - "pickled": pickled, - "type": data_type, - "checksum": calculate_sha256(pickled, False), - "originalPath": "", - "size": sys.getsizeof(pickled), - "name": name, - "run": self._name, - "category": category, - "storage": self._storage_id, - } + _name: str = name or f"{obj.__class__.__name__.lower()}_{id(obj)}" - # Register file try: - return self._simvue is not None and self._simvue.save_file(data) is not None - except RuntimeError as e: - self._error(f"{e.args[0]}") + _artifact = ObjectArtifact.new( + name=_name, + obj=obj, + allow_pickling=allow_pickle, + storage=self._storage_id, + metadata=metadata, + ) + _artifact.attach_to_run(self.id, category) + except (ValueError, RuntimeError) as e: + self._error(f"Failed to save object '{_name}' to run '{self.id}': {e}") return False + return True + @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @pydantic.validate_call @@ -1333,11 +1301,12 @@ def save_file( self, file_path: pydantic.FilePath, category: 
typing.Literal["input", "output", "code"], - filetype: typing.Optional[str] = None, + file_type: str | None = None, preserve_path: bool = False, name: typing.Optional[ typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)] ] = None, + metadata: dict[str, typing.Any] = None, ) -> bool: """Upload file to the server @@ -1347,19 +1316,21 @@ def save_file( path to the file to upload category : Literal['input', 'output', 'code'] category of file with respect to this run - filetype : str, optional + file_type : str, optional the MIME file type else this is deduced, by default None preserve_path : bool, optional whether to preserve the path during storage, by default False name : str, optional name to associate with this file, by default None + metadata : str | None, optional + any metadata to attach to the artifact Returns ------- bool whether the upload was successful """ - if not self._simvue: + if not self._sv_obj or not self.id: self._error("Cannot save files, run not initialised") return False @@ -1367,14 +1338,6 @@ def save_file( self._error("Cannot upload output files for runs in the created state") return False - mimetypes.init() - mimetypes_valid = ["application/vnd.plotly.v1+json"] - mimetypes_valid += list(mimetypes.types_map.values()) - - if filetype and filetype not in mimetypes_valid: - self._error(f"Invalid MIME type '{filetype}' specified") - return False - stored_file_name: str = f"{file_path}" if preserve_path and stored_file_name.startswith("./"): @@ -1382,38 +1345,23 @@ def save_file( elif not preserve_path: stored_file_name = os.path.basename(file_path) - # Determine mimetype - if not (mimetype := filetype): - mimetype = mimetypes.guess_type(file_path)[0] or "application/octet-stream" - - data: dict[str, typing.Any] = { - "name": name or stored_file_name, - "run": self._name, - "type": mimetype, - "storage": self._storage_id, - "category": category, - "size": (file_size := os.path.getsize(file_path)), - "originalPath": os.path.abspath( - 
os.path.expanduser(os.path.expandvars(file_path)) - ), - "checksum": calculate_sha256(f"{file_path}", True), - } - - if not file_size: - click.secho( - "[simvue] WARNING: saving zero-sized files not currently supported", - bold=self._term_color, - fg="yellow" if self._term_color else None, - ) - return True - - # Register file try: - return self._simvue.save_file(data) is not None - except RuntimeError as e: - self._error(f"{e.args[0]}") + # Register file + _artifact = FileArtifact.new( + name=name or stored_file_name, + storage=self._storage_id, + file_path=file_path, + offline=self._user_config.run.mode == "offline", + mime_type=file_type, + metadata=metadata, + ) + _artifact.attach_to_run(self.id, category) + except (ValueError, RuntimeError) as e: + self._error(f"Failed to save file: {e}") return False + return True + @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @pydantic.validate_call @@ -1421,7 +1369,7 @@ def save_directory( self, directory: pydantic.DirectoryPath, category: typing.Literal["output", "input", "code"], - filetype: typing.Optional[str] = None, + file_type: str | None = None, preserve_path: bool = False, ) -> bool: """Upload files from a whole directory @@ -1432,7 +1380,7 @@ def save_directory( the directory to save to the run category : Literal[['output', 'input', 'code'] the category to assign to the saved objects within this directory - filetype : str, optional + file_type : str, optional manually specify the MIME type for items in the directory, by default None preserve_path : bool, optional preserve the full path, by default False @@ -1442,24 +1390,21 @@ def save_directory( bool if the directory save was successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot save directory, run not inirialised") return False - if filetype: - mimetypes_valid = [] + if file_type: mimetypes.init() - for _, value in mimetypes.types_map.items(): - mimetypes_valid.append(value) - - if filetype not in 
mimetypes_valid: + mimetypes_valid = [value for _, value in mimetypes.types_map.items()] + if file_type not in mimetypes_valid: self._error("Invalid MIME type specified") return False for dirpath, _, filenames in os.walk(directory): for filename in filenames: if (full_path := pathlib.Path(dirpath).joinpath(filename)).is_file(): - self.save_file(full_path, category, filetype, preserve_path) + self.save_file(full_path, category, file_type, preserve_path) return True @@ -1468,9 +1413,9 @@ def save_directory( @pydantic.validate_call def save_all( self, - items: list[typing.Union[pydantic.FilePath, pydantic.DirectoryPath]], + items: list[pydantic.FilePath | pydantic.DirectoryPath], category: typing.Literal["input", "output", "code"], - filetype: typing.Optional[str] = None, + file_type: str | None = None, preserve_path: bool = False, ) -> bool: """Save a set of files and directories @@ -1481,7 +1426,7 @@ def save_all( list of file paths and directories to save category : Literal['input', 'output', 'code'] the category to assign to the saved objects - filetype : str, optional + file_type : str, optional manually specify the MIME type for all items, by default None preserve_path : bool, optional _preserve the full path, by default False @@ -1493,9 +1438,11 @@ def save_all( """ for item in items: if item.is_file(): - save_file = self.save_file(item, category, filetype, preserve_path) + save_file = self.save_file(item, category, file_type, preserve_path) elif item.is_dir(): - save_file = self.save_directory(item, category, filetype, preserve_path) + save_file = self.save_directory( + item, category, file_type, preserve_path + ) else: self._error(f"{item}: No such file or directory") save_file = False @@ -1528,15 +1475,12 @@ def set_status( self._error("Run is not active") return False - data: dict[str, str] = {"name": self._name, "status": status} self._status = status - try: - if self._simvue and self._simvue.update(data): - return True - except RuntimeError as e: - 
self._error(f"{e.args[0]}") - return False + if self._sv_obj: + self._sv_obj.status = status + self._sv_obj.commit() + return True return False @@ -1563,10 +1507,13 @@ def _tidy_run(self) -> None: self._dispatcher.purge() self._dispatcher.join() + if self._user_config.run.mode == "offline" and self._status != "created": + self._user_config.offline.cache.joinpath( + "runs", f"{self._id}.closed" + ).touch() + if _non_zero := self.executor.exit_status: - _error_msgs: dict[str, typing.Optional[str]] = ( - self.executor.get_error_summary() - ) + _error_msgs: dict[str, str] | None = self.executor.get_error_summary() _error_msg = "\n".join( f"{identifier}:\n{msg}" for identifier, msg in _error_msgs.items() ) @@ -1591,7 +1538,7 @@ def close(self) -> bool: """ self._executor.wait_for_completion() - if not self._simvue: + if not self._sv_obj: self._error("Cannot close run, not initialised") return False @@ -1608,17 +1555,14 @@ def close(self) -> bool: @pydantic.validate_call def set_folder_details( self, - path: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)], - metadata: typing.Optional[dict[str, typing.Union[int, str, float]]] = None, - tags: typing.Optional[list[str]] = None, - description: typing.Optional[str] = None, + metadata: dict[str, int | str | float] | None = None, + tags: list[str] | None = None, + description: str | None = None, ) -> bool: """Add metadata to the specified folder Parameters ---------- - path : str - folder path metadata : dict[str, int | str | float], optional additional metadata to attach to this folder, by default None tags : list[str], optional @@ -1631,7 +1575,7 @@ def set_folder_details( bool returns True if update was successful """ - if not self._simvue: + if not self._folder: self._error("Cannot update folder details, run was not initialised") return False @@ -1639,33 +1583,29 @@ def set_folder_details( self._error("Run is not active") return False - data: dict[str, typing.Any] = {"path": path} - - if metadata: - 
data["metadata"] = metadata or {} - - if tags: - data["tags"] = tags or [] - - if description: - data["description"] = description - try: - if self._simvue.set_folder_details(data): - return True - except RuntimeError as e: - self._error(f"{e.args[0]}") + self._folder.read_only(False) + if metadata: + self._folder.metadata = metadata + if tags: + self._folder.tags = tags + if description: + self._folder.description = description + self._folder.commit() + self._folder.read_only(True) + except (RuntimeError, ValueError, pydantic.ValidationError) as e: + self._error(f"Failed to update folder '{self._folder.name}' details: {e}") return False - return False + return True @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @pydantic.validate_call def add_alerts( self, - ids: typing.Optional[list[str]] = None, - names: typing.Optional[list[str]] = None, + ids: list[str] | None = None, + names: list[str] | None = None, ) -> bool: """Add a set of existing alerts to this run by name or id @@ -1681,7 +1621,7 @@ def add_alerts( bool returns True if successful """ - if not self._simvue: + if not self._sv_obj: self._error("Cannot add alerts, run not initialised") return False @@ -1690,10 +1630,10 @@ def add_alerts( if names and not ids: try: - if alerts := self._simvue.list_alerts(): + if alerts := Alert.get(offline=self._user_config.run.mode == "offline"): for alert in alerts: - if alert["name"] in names: - ids.append(alert["id"]) + if alert.name in names: + ids.append(alert.id) except RuntimeError as e: self._error(f"{e.args[0]}") return False @@ -1704,193 +1644,259 @@ def add_alerts( self._error("Need to provide alert ids or alert names") return False - data: dict[str, typing.Any] = {"id": self._id, "alerts": ids} - - try: - if self._simvue.update(data): - return True - except RuntimeError as e: - self._error(f"{e.args[0]}") - return False + # Avoid duplication + self._sv_obj.alerts = list(set(self._sv_obj.alerts + [ids])) + self._sv_obj.commit() 
return False + def _attach_alert_to_run(self, alert: AlertBase) -> str | None: + # Check if the alert already exists + _alert_id: str | None = None + + for _, _existing_alert in Alert.get( + offline=self._user_config.run.mode == "offline" + ): + if _existing_alert.compare(alert): + _alert_id = _existing_alert.id + logger.info("Existing alert found with id: %s", _existing_alert.id) + break + + if not _alert_id: + alert.commit() + _alert_id = alert.id + + self._sv_obj.alerts = [_alert_id] + + self._sv_obj.commit() + + return _alert_id + @skip_if_failed("_aborted", "_suppress_errors", None) @check_run_initialised @pydantic.validate_call - def create_alert( + def create_metric_range_alert( self, name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], - source: typing.Literal["events", "metrics", "user"] = "metrics", - description: typing.Optional[str] = None, - frequency: typing.Optional[pydantic.PositiveInt] = None, + metric: str, + range_low: float, + range_high: float, + rule: typing.Literal["is inside range", "is outside range"], + *, + description: str | None = None, window: pydantic.PositiveInt = 5, - rule: typing.Optional[ - typing.Literal[ - "is above", "is below", "is inside range", "is outside range" - ] - ] = None, - metric: typing.Optional[str] = None, - threshold: typing.Optional[float] = None, - range_low: typing.Optional[float] = None, - range_high: typing.Optional[float] = None, - aggregation: typing.Optional[ - typing.Literal["average", "sum", "at least one", "all"] + frequency: pydantic.PositiveInt = 1, + aggregation: typing.Literal[ + "average", "sum", "at least one", "all" ] = "average", notification: typing.Literal["email", "none"] = "none", - pattern: typing.Optional[str] = None, trigger_abort: bool = False, - ) -> typing.Optional[str]: - """Creates an alert with the specified name (if it doesn't exist) + ) -> str | None: + """Creates a metric range alert with the specified name (if it doesn't exist) and applies it to the current run. 
If alert already exists it will not be duplicated. - Note available arguments depend on the alert source: - - Event - ===== - - Alerts triggered based on the contents of an event message, arguments are: - - frequency - - pattern - - Metrics - ======= + Parameters + ---------- + name : str + name of alert + metric : str + metric to monitor + range_low : float + the lower bound value + range_high : float + the upper bound value + rule : Literal['is inside range', 'is outside range'] + rule defining range alert conditions + description : str, optional + description for this alert, default None + window : PositiveInt, optional + time period in seconds over which metrics are averaged, by default 5 + frequency : PositiveInt, optional + frequency at which to check alert condition in seconds, by default 1 + aggregation : Literal['average', 'sum', 'at least one', 'all'], optional + method to use when aggregating metrics within time window, default 'average'. + notification : Literal['email', 'none'], optional + whether to notify on trigger, by default "none" + trigger_abort : bool, optional + whether this alert can trigger a run abort, default False - Alerts triggered based on metric value condictions, arguments are: - - frequency - - rule - - window - - aggregation - - metric - - threshold / (range_low, range_high) + Returns + ------- + str | None + returns the created alert ID if successful - User - ==== + """ + _alert = MetricsRangeAlert.new( + name=name, + description=description, + metric=metric, + window=window, + aggregation=aggregation, + notification=notification, + rule=rule, + range_low=range_low, + range_high=range_high, + frequency=frequency or 60, + offline=self._user_config.run.mode == "offline", + ) + _alert.abort = trigger_abort + return self._attach_alert_to_run(_alert) - User defined alerts, manually triggered.
+ @skip_if_failed("_aborted", "_suppress_errors", None) + @check_run_initialised + @pydantic.validate_call + def create_metric_threshold_alert( + self, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + metric: str, + threshold: float, + rule: typing.Literal["is above", "is below"], + *, + description: str | None = None, + window: pydantic.PositiveInt = 5, + frequency: pydantic.PositiveInt = 1, + aggregation: typing.Literal[ + "average", "sum", "at least one", "all" + ] = "average", + notification: typing.Literal["email", "none"] = "none", + trigger_abort: bool = False, + ) -> str | None: + """Creates a metric threshold alert with the specified name (if it doesn't exist) + and applies it to the current run. If alert already exists it will + not be duplicated. Parameters ---------- name : str name of alert - source : Literal['events', 'metrics', 'user'], optional - the source which triggers this alert based on status, either - event based, metric values or manual user defined trigger. By default "metrics". 
+ metric : str + metric to monitor + threshold : float + the threshold value + rule : Literal['is above', 'is below'] + rule defining threshold alert conditions description : str, optional - description for this alert - frequency : PositiveInt, optional - frequency at which to check alert condition in seconds, by default None + description for this alert, default None window : PositiveInt, optional time period in seconds over which metrics are averaged, by default 5 - rule : Literal['is above', 'is below', 'is inside', 'is outside range'], optional - rule defining metric based alert conditions, by default None - metric : str, optional - metric to monitor, by default None - threshold : float, optional - the threshold value if 'rule' is 'is below' or 'is above', by default None - range_low : float, optional - the lower bound value if 'rule' is 'is inside range' or 'is outside range', by default None - range_high : float, optional - the upper bound value if 'rule' is 'is inside range' or 'is outside range', by default None + frequency : PositiveInt, optional + frequency at which to check alert condition in seconds, by default 1 aggregation : Literal['average', 'sum', 'at least one', 'all'], optional method to use when aggregating metrics within time window, default 'average'.
notification : Literal['email', 'none'], optional whether to notify on trigger, by default "none" - pattern : str, optional - for event based alerts pattern to look for, by default None trigger_abort : bool, optional - whether this alert can trigger a run abort + whether this alert can trigger a run abort, default False Returns ------- str | None returns the created alert ID if successful - """ - if not self._simvue: - self._error("Cannot add alert, run not initialised") - return None - if rule in ("is below", "is above") and threshold is None: - self._error("threshold must be defined for the specified alert type") - return None + """ + _alert = MetricsThresholdAlert.new( + name=name, + metric=metric, + description=description, + threshold=threshold, + rule=rule, + window=window, + frequency=frequency, + aggregation=aggregation, + notification=notification, + offline=self._user_config.run.mode == "offline", + ) + _alert.abort = trigger_abort + return self._attach_alert_to_run(_alert) - if rule in ("is outside range", "is inside range") and ( - range_low is None or range_high is None - ): - self._error( - "range_low and range_high must be defined for the specified alert type" - ) - return None - - alert_definition = {} - - if source == "metrics": - alert_definition["aggregation"] = aggregation - alert_definition["metric"] = metric - alert_definition["window"] = window - alert_definition["rule"] = rule - alert_definition["frequency"] = frequency - if threshold is not None: - alert_definition["threshold"] = threshold - elif range_low is not None and range_high is not None: - alert_definition["range_low"] = range_low - alert_definition["range_high"] = range_high - elif source == "events": - alert_definition["pattern"] = pattern - alert_definition["frequency"] = frequency - else: - alert_definition = None - - alert: dict[str, typing.Any] = { - "name": name, - "notification": notification, - "source": source, - "alert": alert_definition, - "description": description, - 
"abort": trigger_abort, - } + @skip_if_failed("_aborted", "_suppress_errors", None) + @check_run_initialised + @pydantic.validate_call + def create_event_alert( + self, + name: str, + pattern: str, + *, + description: str | None = None, + frequency: pydantic.PositiveInt = 1, + notification: typing.Literal["email", "none"] = "none", + trigger_abort: bool = False, + ) -> str | None: + """Creates an events alert with the specified name (if it doesn't exist) + and applies it to the current run. If alert already exists it will + not be duplicated. - # Check if the alert already exists - alert_id: typing.Optional[str] = None - try: - alerts = self._simvue.list_alerts() - except RuntimeError as e: - self._error(f"{e.args[0]}") - return None + Parameters + ---------- + name : str + name of alert + pattern : str, optional + for event based alerts pattern to look for, by default None + frequency : PositiveInt, optional + frequency at which to check alert condition in seconds, by default None + notification : Literal['email', 'none'], optional + whether to notify on trigger, by default "none" + trigger_abort : bool, optional + whether this alert can trigger a run abort - if alerts: - for existing_alert in alerts: - if existing_alert["name"] == alert["name"]: - if compare_alerts(existing_alert, alert): - alert_id = existing_alert["id"] - logger.info("Existing alert found with id: %s", alert_id) - break + Returns + ------- + str | None + returns the created alert ID if successful - if not alert_id: - try: - logger.debug(f"Creating new alert with definition: {alert}") - response = self._simvue.add_alert(alert) - except RuntimeError as e: - self._error(f"{e.args[0]}") - return None + """ + _alert = EventsAlert.new( + name=name, + description=description, + pattern=pattern, + notification=notification, + frequency=frequency, + offline=self._user_config.run.mode == "offline", + ) + _alert.abort = trigger_abort + return self._attach_alert_to_run(_alert) - if not (alert_id := 
(response or {}).get("id")): - self._error("unable to create alert") - return None + @skip_if_failed("_aborted", "_suppress_errors", None) + @check_run_initialised + @pydantic.validate_call + def create_user_alert( + self, + name: typing.Annotated[str, pydantic.Field(pattern=NAME_REGEX)], + *, + description: str | None = None, + notification: typing.Literal["email", "none"] = "none", + trigger_abort: bool = False, + ) -> None: + """Creates a user alert with the specified name (if it doesn't exist) + and applies it to the current run. If alert already exists it will + not be duplicated. - if alert_id: - # TODO: What if we keep existing alerts/add a new one later? - data = {"id": self._id, "alerts": [alert_id]} - logger.debug(f"Updating run with info: {data}") + Parameters + ---------- + name : str + name of alert + description : str, optional + description for this alert, default None + notification : Literal['email', 'none'], optional + whether to notify on trigger, by default "none" + trigger_abort : bool, optional + whether this alert can trigger a run abort, default False - try: - self._simvue.update(data) - except RuntimeError as e: - self._error(f"{e.args[0]}") - return None + Returns + ------- + str | None + returns the created alert ID if successful - return alert_id + """ + _alert = UserAlert.new( + name=name, + notification=notification, + description=description, + offline=self._user_config.run.mode == "offline", + ) + _alert.abort = trigger_abort + return self._attach_alert_to_run(_alert) @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised @@ -1915,14 +1921,16 @@ def log_alert( if state not in ("ok", "critical"): self._error('state must be either "ok" or "critical"') return False - if not self._simvue: - self._error("Cannot log alert, run not initialised") - return False - try: - self._simvue.set_alert_state(identifier, state) - except RuntimeError as e: - self._error(f"{e.args[0]}") - return False + _alert = 
UserAlert(identifier=identifier) + # if not isinstance(_alert, UserAlert): + # self._error( + # f"Cannot update state for alert '{identifier}' " + # f"of type '{_alert.__class__.__name__.lower()}'" + # ) + # return False + _alert.read_only(False) + _alert.set_status(run_id=self._id, status=state) + _alert.commit() return True diff --git a/simvue/sender.py b/simvue/sender.py index 615821d4..ca087d72 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -1,331 +1,163 @@ -from concurrent.futures import ThreadPoolExecutor -import glob -import json -import typing -import logging -import os -import shutil -import time +""" +Simvue Sender +============== -import msgpack +Function to send data cached by Simvue in Offline mode to the server. +""" +import json +import pydantic +import logging +from concurrent.futures import ThreadPoolExecutor +import threading from simvue.config.user import SimvueConfiguration -from .factory.proxy.remote import Remote -from .utilities import create_file, remove_file - -logger = logging.getLogger(__name__) - -NUM_PARALLEL_WORKERS = 10 -MAX_RUNS = 10 - - -def set_details(name, id, filename): - """ - Write name & id to file - """ - data = {"name": name, "id": id} - with open(filename, "w") as fh: - json.dump(data, fh) - - -def get_details(name): - """ - Get name & id from file - """ - with open(name) as fh: - data = json.load(fh) - return data["name"], data["id"] - - -def update_name(id, data): - """ - Update id in metrics/events - """ - for item in data: - item["id"] = id - - -def add_name(name, data, filename): - """ - Update name in JSON - """ - if not data["name"]: - data["name"] = name - with open(filename, "w") as fh: - json.dump(data, fh) - - return data - - -def read_json(filename): - with open(filename, "r") as fh: - return json.load(fh) - - -def get_json(filename, run_id=None, artifact=False): - """ - Get JSON from a file - """ - with open(filename, "r") as fh: - data = json.load(fh) - if run_id: - if artifact: - for item in data: - 
if item == "run": - data[item] = run_id - return data - - if "run" in data: - data["run"] = run_id - else: - data["id"] = run_id - - return data - - -def sender( - server_url: typing.Optional[str] = None, server_token: typing.Optional[str] = None -) -> str: - """ - Asynchronous upload of runs to Simvue server - """ - directory = SimvueConfiguration.fetch(mode="offline").offline.cache - - # Clean up old runs after waiting 5 mins - runs = glob.glob(f"{directory}/*/sent") - - for run in runs: - id = run.split("/")[len(run.split("/")) - 2] - logger.info("Cleaning up directory with id %s", id) - - if time.time() - os.path.getmtime(run) > 300: - try: - shutil.rmtree(f"{directory}/{id}") - except Exception: - logger.error("Got exception trying to cleanup run in directory %s", id) - - # Deal with runs in the created, running or a terminal state - runs = ( - glob.glob(f"{directory}/*/created") - + glob.glob(f"{directory}/*/running") - + glob.glob(f"{directory}/*/completed") - + glob.glob(f"{directory}/*/failed") - + glob.glob(f"{directory}/*/terminated") - ) - - if len(runs) > MAX_RUNS: - logger.info("Lauching %d workers", NUM_PARALLEL_WORKERS) - with ThreadPoolExecutor(NUM_PARALLEL_WORKERS) as executor: - for run in runs: - executor.submit( - process(run, server_token=server_token, server_url=server_url) - ) - return [executor.result() for _ in runs] - else: - return [process(run) for run in runs] - - -def process( - run, server_url: typing.Optional[str], server_token: typing.Optional[str] -) -> typing.Optional[str]: +import simvue.api.objects + +UPLOAD_ORDER: list[str] = [ + "tenants", + "users", + "storage", + "folders", + "tags", + "alerts", + "runs", + "artifacts", + "metrics", + "events", +] + +_logger = logging.getLogger(__name__) + + +def upload_cached_file( + cache_dir: pydantic.DirectoryPath, + obj_type: str, + file_path: pydantic.FilePath, + id_mapping: dict[str, str], + lock: threading.Lock, +): + """Upload data stored in a cached file to the Simvue server. 
+ + Parameters + ---------- + cache_dir : pydantic.DirectoryPath + The directory where cached files are stored + obj_type : str + The type of object which should be created for this cached file + file_path : pydantic.FilePath + The path to the cached file to upload + id_mapping : dict[str, str] + A mapping of offline to online object IDs + lock : threading.Lock + A lock to prevent multiple threads accessing the id mapping directory at once """ - Handle updates for the specified run - """ - status = None - - if run.endswith("running"): - status = "running" - if run.endswith("created"): - status = "created" - elif run.endswith("completed"): - status = "completed" - elif run.endswith("failed"): - status = "failed" - elif run.endswith("terminated"): - status = "terminated" - - current = ( - run.replace("/running", "") - .replace("/completed", "") - .replace("/failed", "") - .replace("/terminated", "") - .replace("/created", "") - ) - - if os.path.isfile(f"{current}/sent"): - if status == "running": - remove_file(f"{current}/running") - elif status == "completed": - remove_file(f"{current}/completed") - elif status == "failed": - remove_file(f"{current}/failed") - elif status == "terminated": - remove_file(f"{current}/terminated") - elif status == "created": - remove_file(f"{current}/created") - return - - id = run.split("/")[len(run.split("/")) - 2] - - run_init = get_json(f"{current}/run.json") - start_time = os.path.getctime(f"{current}/run.json") - - if run_init["name"]: - logger.info("Considering run with name %s and id %s", run_init["name"], id) + _current_id = file_path.name.split(".")[0] + _data = json.load(file_path.open()) + _exact_type: str = _data.pop("obj_type") + try: + _instance_class = getattr(simvue.api.objects, _exact_type) + except AttributeError as e: + raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e + + # If it is an ObjectArtifact, need to load the object as bytes from a different file + if 
issubclass(_instance_class, simvue.api.objects.ObjectArtifact): + with open(file_path.parent.joinpath(f"{_current_id}.object"), "rb") as file: + _data["serialized"] = file.read() + + # We want to reconnect if there is an online ID stored for this file + if _online_id := id_mapping.get(_current_id): + obj_for_upload = _instance_class( + identifier=_online_id, _read_only=False, **_data + ) else: - logger.info("Considering run with no name yet and id %s", id) + obj_for_upload = _instance_class.new(**_data) - # Create run if it hasn't previously been created - created_file = f"{current}/init" - name = None - config = SimvueConfiguration.fetch( - mode="online", server_token=server_token, server_url=server_url - ) - if not os.path.isfile(created_file): - remote = Remote( - name=run_init["name"], uniq_id=id, config=config, suppress_errors=False - ) + with lock: + obj_for_upload.on_reconnect(id_mapping) - name, run_id = remote.create_run(run_init) - if name: - logger.info("Creating run with name %s and id %s", name, id) - run_init = add_name(name, run_init, f"{current}/run.json") - set_details(name, run_id, created_file) - else: - logger.error("Failure creating run") + try: + obj_for_upload.commit() + _new_id = obj_for_upload.id + except RuntimeError as error: + if "status 409" in error.args[0]: return - else: - name, run_id = get_details(created_file) - run_init["name"] = name - remote = Remote( - name=run_init["name"], uniq_id=run_id, config=config, suppress_errors=False + raise error + if not _new_id: + raise RuntimeError( + f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier" ) + if id_mapping.get(_current_id): + _logger.info(f"Updated {obj_for_upload.__class__.__name__} '{_new_id}'") + else: + _logger.info(f"Created {obj_for_upload.__class__.__name__} '{_new_id}'") + file_path.unlink(missing_ok=True) + if issubclass(_instance_class, simvue.api.objects.ObjectArtifact): + file_path.parent.joinpath(f"{_current_id}.object").unlink() - if status == 
"running": - # Check for recent heartbeat - heartbeat_filename = f"{current}/heartbeat" - if os.path.isfile(heartbeat_filename): - mtime = os.path.getmtime(heartbeat_filename) - if time.time() - mtime > 180: - status = "lost" - - # Check for no recent heartbeat - if not os.path.isfile(heartbeat_filename): - if time.time() - start_time > 180: - status = "lost" - - # Handle lost runs - if status == "lost": - logger.info("Changing status to lost, name %s and id %s", run_init["name"], id) - status = "lost" - create_file(f"{current}/lost") - remove_file(f"{current}/running") - - # Send heartbeat if the heartbeat file was touched recently - heartbeat_filename = f"{current}/heartbeat" - if os.path.isfile(heartbeat_filename): - if ( - status == "running" - and time.time() - os.path.getmtime(heartbeat_filename) < 120 - ): - logger.info("Sending heartbeat for run with name %s", run_init["name"]) - remote.send_heartbeat() - - metrics_gathered = [] - events_gathered = [] - - # Upload metrics, events, files & metadata as necessary - files = sorted(glob.glob(f"{current}/*"), key=os.path.getmtime) - updates = 0 - for record in files: - if ( - record.endswith("/run.json") - or record.endswith("/running") - or record.endswith("/completed") - or record.endswith("/failed") - or record.endswith("/terminated") - or record.endswith("/lost") - or record.endswith("/sent") - or record.endswith("-proc") - ): - continue - - rename = False - - # Handle metrics - if "/metrics-" in record: - logger.info("Gathering metrics for run %s", run_init["name"]) - data = get_json(record, run_id) - metrics_gathered = metrics_gathered + data["metrics"] - rename = True - - # Handle events - if "/events-" in record: - logger.info("Gathering events for run %s", run_init["name"]) - data = get_json(record, run_id) - events_gathered = events_gathered + data["events"] - rename = True - - # Handle updates - if "/update-" in record: - logger.info("Sending update for run %s", run_init["name"]) - data = 
get_json(record, run_id) - if remote.update(data): - for item in data: - if item == "status" and data[item] in ( - "completed", - "failed", - "terminated", - ): - create_file(f"{current}/sent") - remove_file(f"{current}/{status}") - rename = True - - # Handle folders - if "/folder-" in record: - logger.info("Sending folder details for run %s", run_init["name"]) - if remote.set_folder_details(get_json(record, run_id)): - rename = True - - # Handle alerts - if "/alert-" in record: - logger.info("Sending alert details for run %s", run_init["name"]) - if remote.add_alert(get_json(record, run_id)): - rename = True - - # Handle files - if "/file-" in record: - logger.info("Saving file for run %s", run_init["name"]) - if remote.save_file(get_json(record, run_id, True)): - rename = True + with lock: + id_mapping[_current_id] = _new_id - # Rename processed files - if rename: - os.rename(record, f"{record}-proc") - updates += 1 + if obj_type in {"alerts", "runs", "folders", "tags"}: + cache_dir.joinpath("server_ids", f"{_current_id}.txt").write_text(_new_id) - # Send metrics if necessary - if metrics_gathered: - logger.info("Sending metrics for run %s", run_init["name"]) - data = {"metrics": metrics_gathered, "run": run_id} - remote.send_metrics(msgpack.packb(data, use_bin_type=True)) + if ( + obj_type == "runs" + and cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").exists() + ): + # Get list of alerts created by this run - their IDs can be deleted + for id in _data.get("alerts", []): + cache_dir.joinpath("server_ids", f"{id}.txt").unlink() - # Send events if necessary - if events_gathered: - logger.info("Sending events for run %s", run_init["name"]) - data = {"events": events_gathered, "run": run_id} - remote.send_event(msgpack.packb(data, use_bin_type=True)) + cache_dir.joinpath("server_ids", f"{_current_id}.txt").unlink() + cache_dir.joinpath(f"{obj_type}", f"{_current_id}.closed").unlink() + _logger.info(f"Run {_current_id} closed - deleting cached copies...") 
-    # If the status is completed and there were no updates, the run must have completely finished
-    if updates == 0 and status in ("completed", "failed", "terminated"):
-        logger.info("Finished sending run %s", run_init["name"])
-        data = {"id": run_id, "status": status}
-        if remote.update(data):
-            create_file(f"{current}/sent")
-            remove_file(f"{current}/{status}")
-    elif updates == 0 and status == "lost":
-        logger.info("Finished sending run %s as it was lost", run_init["name"])
-        create_file(f"{current}/sent")
-    return run_id
+@pydantic.validate_call
+def sender(
+    cache_dir: pydantic.DirectoryPath | None = None,
+    max_workers: int = 5,
+    threading_threshold: int = 10,
+    objects_to_upload: list[str] = UPLOAD_ORDER,
+):
+    """Send data from a local cache directory to the Simvue server.
+
+    Parameters
+    ----------
+    cache_dir : pydantic.DirectoryPath | None, optional
+        The directory where cached files are stored, by default the offline cache from the Simvue configuration
+    max_workers : int
+        The maximum number of threads to use
+    threading_threshold : int
+        The number of cached files above which threading will be used
+    objects_to_upload : list[str]
+        Types of objects to upload, by default uploads all types of objects present in cache
+    """
+    cache_dir = cache_dir or SimvueConfiguration.fetch().offline.cache
+    cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True)
+    _id_mapping: dict[str, str] = {
+        file_path.name.split(".")[0]: file_path.read_text()
+        for file_path in cache_dir.glob("server_ids/*.txt")
+    }
+    _lock = threading.Lock()
+    _upload_order = [item for item in UPLOAD_ORDER if item in objects_to_upload]
+
+    for _obj_type in _upload_order:
+        _offline_files = list(cache_dir.glob(f"{_obj_type}/*.json"))
+        if len(_offline_files) < threading_threshold:
+            for file_path in _offline_files:
+                upload_cached_file(cache_dir, _obj_type, file_path, _id_mapping, _lock)
+        else:
+            with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                _results = executor.map(
+                    lambda file_path: upload_cached_file(
+                        cache_dir=cache_dir,
+                        
obj_type=_obj_type, + file_path=file_path, + id_mapping=_id_mapping, + lock=_lock, + ), + _offline_files, + ) + return _id_mapping diff --git a/simvue/serialization.py b/simvue/serialization.py index d8252713..024c2efc 100644 --- a/simvue/serialization.py +++ b/simvue/serialization.py @@ -5,6 +5,7 @@ Contains serializers for storage of objects on the Simvue server """ +import contextlib import typing import pickle import pandas @@ -45,9 +46,7 @@ def _is_torch_tensor(data: typing.Any) -> bool: return False -def serialize_object( - data: typing.Any, allow_pickle: bool -) -> typing.Optional[tuple[str, str]]: +def serialize_object(data: typing.Any, allow_pickle: bool) -> tuple[str, str] | None: """Determine which serializer to use for the given object Parameters @@ -76,23 +75,19 @@ def serialize_object( elif _is_torch_tensor(data): return _serialize_torch_tensor(data) elif module_name == "builtins" and class_name == "module" and not allow_pickle: - try: + with contextlib.suppress(ImportError): import matplotlib.pyplot if data == matplotlib.pyplot: return _serialize_matplotlib(data) - except ImportError: - pass elif serialized := _serialize_json(data): return serialized - if allow_pickle: - return _serialize_pickle(data) - return None + return _serialize_pickle(data) if allow_pickle else None @check_extra("plot") -def _serialize_plotly_figure(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_plotly_figure(data: typing.Any) -> tuple[str, str]: try: import plotly except ImportError: @@ -107,7 +102,7 @@ def _serialize_plotly_figure(data: typing.Any) -> typing.Optional[tuple[str, str @check_extra("plot") -def _serialize_matplotlib(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_matplotlib(data: typing.Any) -> tuple[str, str] | None: try: import plotly except ImportError: @@ -122,7 +117,7 @@ def _serialize_matplotlib(data: typing.Any) -> typing.Optional[tuple[str, str]]: @check_extra("plot") -def _serialize_matplotlib_figure(data: 
typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_matplotlib_figure(data: typing.Any) -> tuple[str, str] | None: try: import plotly except ImportError: @@ -136,7 +131,7 @@ def _serialize_matplotlib_figure(data: typing.Any) -> typing.Optional[tuple[str, return data, mimetype -def _serialize_numpy_array(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_numpy_array(data: typing.Any) -> tuple[str, str] | None: mimetype = "application/vnd.simvue.numpy.v1" mfile = BytesIO() numpy.save(mfile, data, allow_pickle=False) @@ -145,7 +140,7 @@ def _serialize_numpy_array(data: typing.Any) -> typing.Optional[tuple[str, str]] return data, mimetype -def _serialize_dataframe(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_dataframe(data: typing.Any) -> tuple[str, str] | None: mimetype = "application/vnd.simvue.df.v1" mfile = BytesIO() data.to_csv(mfile) @@ -155,7 +150,7 @@ def _serialize_dataframe(data: typing.Any) -> typing.Optional[tuple[str, str]]: @check_extra("torch") -def _serialize_torch_tensor(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_torch_tensor(data: typing.Any) -> tuple[str, str] | None: try: import torch except ImportError: @@ -170,7 +165,7 @@ def _serialize_torch_tensor(data: typing.Any) -> typing.Optional[tuple[str, str] return data, mimetype -def _serialize_json(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_json(data: typing.Any) -> tuple[str, str] | None: mimetype = "application/json" try: mfile = BytesIO() @@ -182,7 +177,7 @@ def _serialize_json(data: typing.Any) -> typing.Optional[tuple[str, str]]: return data, mimetype -def _serialize_pickle(data: typing.Any) -> typing.Optional[tuple[str, str]]: +def _serialize_pickle(data: typing.Any) -> tuple[str, str] | None: mimetype = "application/octet-stream" data = pickle.dumps(data) return data, mimetype @@ -196,8 +191,6 @@ def deserialize_data( """ if mimetype == "application/vnd.plotly.v1+json": return 
_deserialize_plotly_figure(data) - elif mimetype == "application/vnd.plotly.v1+json": - return _deserialize_matplotlib_figure(data) elif mimetype == "application/vnd.simvue.numpy.v1": return _deserialize_numpy_array(data) elif mimetype == "application/vnd.simvue.df.v1": @@ -231,7 +224,7 @@ def _deserialize_matplotlib_figure(data: "Buffer") -> typing.Optional["Figure"]: return data -def _deserialize_numpy_array(data: "Buffer") -> typing.Optional[typing.Any]: +def _deserialize_numpy_array(data: "Buffer") -> typing.Any | None: mfile = BytesIO(data) mfile.seek(0) data = numpy.load(mfile, allow_pickle=False) @@ -257,11 +250,11 @@ def _deserialize_torch_tensor(data: "Buffer") -> typing.Optional["Tensor"]: return torch.load(mfile) -def _deserialize_pickle(data) -> typing.Optional[typing.Any]: +def _deserialize_pickle(data) -> typing.Any | None: data = pickle.loads(data) return data -def _deserialize_json(data) -> typing.Optional[typing.Any]: +def _deserialize_json(data) -> typing.Any | None: data = json.loads(data) return data diff --git a/simvue/system.py b/simvue/system.py index 7ea86eb9..84ce016b 100644 --- a/simvue/system.py +++ b/simvue/system.py @@ -67,8 +67,7 @@ def get_system() -> dict[str, typing.Any]: cpu = get_cpu_info() gpu = get_gpu_info() - system: dict[str, typing.Any] = {} - system["cwd"] = os.getcwd() + system: dict[str, typing.Any] = {"cwd": os.getcwd()} system["hostname"] = socket.gethostname() system["pythonversion"] = ( f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}" diff --git a/simvue/utilities.py b/simvue/utilities.py index 2159a666..69a8ecd8 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -2,7 +2,7 @@ import hashlib import logging import json -import sys +import mimetypes import tabulate import pydantic import importlib.util @@ -15,6 +15,7 @@ import jwt from datetime import timezone +from simvue.models import DATETIME_FORMAT CHECKSUM_BLOCK_SIZE = 4096 @@ -27,8 +28,8 @@ def find_first_instance_of_file( - 
file_names: typing.Union[list[str], str], check_user_space: bool = True -) -> typing.Optional[pathlib.Path]: + file_names: list[str] | str, check_user_space: bool = True +) -> pathlib.Path | None: """Traverses a file hierarchy from bottom upwards to find file Returns the first instance of 'file_names' found when moving @@ -36,7 +37,6 @@ def find_first_instance_of_file( Parameters ---------- - file_name: list[str] | str candidate names of file to locate check_user_space: bool, optional check the users home area if current working directory is not @@ -95,12 +95,10 @@ def parse_validation_response( obj_type: str = issue["type"] location: list[str] = issue["loc"] location.remove("body") - location_addr: str = "" - for i, loc in enumerate(location): - if isinstance(loc, int): - location_addr += f"[{loc}]" - else: - location_addr += f"{'.' if i > 0 else ''}{loc}" + location_addr: str = "".join( + (f"[{loc}]" if isinstance(loc, int) else f"{'.' if i > 0 else ''}{loc}") + for i, loc in enumerate(location) + ) headers = ["Type", "Location", "Message"] information = [obj_type, location_addr] @@ -110,7 +108,7 @@ def parse_validation_response( input_arg = body for loc in location: try: - input_arg = input_arg[loc] + input_arg = None if obj_type == "missing" else input_arg[loc] except TypeError: break information.append(input_arg) @@ -125,8 +123,8 @@ def parse_validation_response( def check_extra(extra_name: str) -> typing.Callable: def decorator( - class_func: typing.Optional[typing.Callable] = None, - ) -> typing.Optional[typing.Callable]: + class_func: typing.Callable | None = None, + ) -> typing.Callable | None: @functools.wraps(class_func) def wrapper(self, *args, **kwargs) -> typing.Any: if extra_name == "plot" and not all( @@ -139,14 +137,10 @@ def wrapper(self, *args, **kwargs) -> typing.Any: f"Plotting features require the '{extra_name}' extension to Simvue" ) elif extra_name == "torch": - if importlib.util.find_spec("torch"): + if not 
importlib.util.find_spec("torch"): raise RuntimeError( "PyTorch features require the 'torch' module to be installed" ) - if sys.version_info.minor > 12: - raise RuntimeError( - "PyTorch features are not yet supported for python>3.12" - ) elif extra_name not in EXTRAS: raise RuntimeError(f"Unrecognised extra '{extra_name}'") return class_func(self, *args, **kwargs) if class_func else None @@ -156,14 +150,44 @@ def wrapper(self, *args, **kwargs) -> typing.Any: return decorator -def parse_pydantic_error(class_name: str, error: pydantic.ValidationError) -> str: +def parse_pydantic_error(error: pydantic.ValidationError) -> str: out_table: list[str] = [] for data in json.loads(error.json()): + _input = data.get("input") if data["input"] is not None else "None" + if isinstance(_input, dict): + _input_str = json.dumps(_input, indent=2) + _input_str = "\n".join( + f"{line[:47]}..." if len(line) > 50 else line + for line in _input_str.split("\n") + ) + else: + _input_str = ( + _input_str + if len((_input_str := f"{_input}")) < 50 + else f"{_input_str[:50]}..." 
+ ) + _type: str = data["type"] + + _skip_type_compare_for = ( + "error", + "missing", + "unexpected", + "union_tag", + "parsing", + "scheme", + "syntax", + ) + + if (_input_type := type(_input)) != _type and all( + e not in _type for e in _skip_type_compare_for + ): + _type = f"{_input_type.__name__} != {_type}" + out_table.append( [ - data.get("input") if data["input"] is not None else "None", + _input_str, data["loc"], - data["type"], + _type, data["msg"], ] ) @@ -172,13 +196,13 @@ def parse_pydantic_error(class_name: str, error: pydantic.ValidationError) -> st headers=["Input", "Location", "Type", "Message"], tablefmt="fancy_grid", ) - return f"`{class_name}` Validation:\n{err_table}" + return f"`{error.title}` Validation:\n{err_table}" def skip_if_failed( failure_attr: str, ignore_exc_attr: str, - on_failure_return: typing.Optional[typing.Any] = None, + on_failure_return: typing.Any | None = None, ) -> typing.Callable: """Decorator for ensuring if Simvue throws an exception any other code continues. 
@@ -219,7 +243,7 @@ def wrapper(self: "Run", *args, **kwargs) -> typing.Any: try: return class_func(self, *args, **kwargs) except pydantic.ValidationError as e: - error_str = parse_pydantic_error(class_func.__name__, e) + error_str = parse_pydantic_error(e) if getattr(self, ignore_exc_attr, True): setattr(self, failure_attr, True) logger.error(error_str) @@ -256,7 +280,7 @@ def wrapper(self, *args, **kwargs) -> typing.Any: try: return class_func(self, *args, **kwargs) except pydantic.ValidationError as e: - error_str = parse_pydantic_error(class_func.__name__, e) + error_str = parse_pydantic_error(e) raise RuntimeError(error_str) return wrapper @@ -284,11 +308,11 @@ def remove_file(filename: str) -> None: logger.error("Unable to remove file %s due to: %s", filename, str(err)) -def get_expiry(token) -> typing.Optional[int]: +def get_expiry(token) -> int | None: """ Get expiry date from a JWT token """ - expiry: typing.Optional[int] = None + expiry: int | None = None with contextlib.suppress(jwt.DecodeError): expiry = jwt.decode(token, options={"verify_signature": False})["exp"] @@ -307,7 +331,7 @@ def prepare_for_api(data_in, all=True): return data -def calculate_sha256(filename: str, is_file: bool) -> typing.Optional[str]: +def calculate_sha256(filename: str | typing.Any, is_file: bool) -> str | None: """ Calculate sha256 checksum of the specified file """ @@ -340,29 +364,7 @@ def validate_timestamp(timestamp): return True -def compare_alerts(first, second): - """ """ - for key in ("name", "description", "source", "frequency", "notification"): - if key in first and key in second: - if not first[key]: - continue - - if first[key] != second[key]: - return False - - if "alerts" in first and "alerts" in second: - for key in ("rule", "window", "metric", "threshold", "range_low", "range_high"): - if key in first["alerts"] and key in second["alerts"]: - if not first[key]: - continue - - if first["alerts"][key] != second["alerts"]["key"]: - return False - - return True - - 
-def simvue_timestamp(date_time: typing.Optional[datetime.datetime] = None) -> str: +def simvue_timestamp(date_time: datetime.datetime | None = None) -> str: """Return the Simvue valid timestamp Parameters @@ -377,4 +379,19 @@ def simvue_timestamp(date_time: typing.Optional[datetime.datetime] = None) -> st """ if not date_time: date_time = datetime.datetime.now(timezone.utc) - return date_time.strftime("%Y-%m-%d %H:%M:%S.%f") + return date_time.strftime(DATETIME_FORMAT) + + +@functools.lru_cache +def get_mimetypes() -> list[str]: + """Returns a list of allowed MIME types""" + mimetypes.init() + _valid_mimetypes = ["application/vnd.plotly.v1+json"] + _valid_mimetypes += list(mimetypes.types_map.values()) + return _valid_mimetypes + + +def get_mimetype_for_file(file_path: pathlib.Path) -> str: + """Return MIME type for the given file""" + _guess, *_ = mimetypes.guess_type(file_path) + return _guess or "application/octet-stream" diff --git a/simvue/version.py b/simvue/version.py index 37d9c031..6c2f0889 100644 --- a/simvue/version.py +++ b/simvue/version.py @@ -11,6 +11,6 @@ pathlib.Path(os.path.dirname(__file__)).parents[1], "pyproject.toml" ) if os.path.exists(_metadata): - __version__ = toml.load(_metadata)["tool"]["poetry"]["version"] + __version__ = toml.load(_metadata)["project"]["version"] else: __version__ = "" diff --git a/tests/conftest.py b/tests/conftest.py index d239d3a5..f383f133 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +import contextlib +from numpy import fix import pytest import pytest_mock import typing @@ -8,7 +10,11 @@ import json import pathlib import logging +from simvue.api.objects.artifact import Artifact +from simvue.exception import ObjectNotFoundError import simvue.run as sv_run +import simvue.api.objects as sv_api_obj +import simvue.config.user as sv_cfg import simvue.utilities MAX_BUFFER_SIZE: int = 10 @@ -33,7 +39,6 @@ def clear_out_files() -> None: out_files += list(pathlib.Path.cwd().glob("test_*.err")) for 
file_obj in out_files: - print(file_obj) file_obj.unlink() @@ -54,7 +59,13 @@ def log_messages(caplog): @pytest.fixture def create_test_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with sv_run.Run() as run: - yield run, setup_test_run(run, True, request) + _test_run_data = setup_test_run(run, True, request) + yield run, _test_run_data + with contextlib.suppress(ObjectNotFoundError): + sv_api_obj.Folder(identifier=run._folder.id).delete(recursive=True, delete_runs=True, runs_only=False) + for alert_id in _test_run_data.get("alert_ids", []): + with contextlib.suppress(ObjectNotFoundError): + sv_api_obj.Alert(identifier=alert_id).delete() clear_out_files() @@ -90,6 +101,17 @@ def create_plain_run_offline(mocker: pytest_mock.MockerFixture, request, monkeyp clear_out_files() +@pytest.fixture +def create_run_object() -> sv_api_obj.Run: + _fix_use_id: str = str(uuid.uuid4()).split('-', 1)[0] + _folder = sv_api_obj.Folder.new(path=f"/simvue_unit_testing/{_fix_use_id}") + _folder.commit() + _run = sv_api_obj.Run.new(folder=f"/simvue_unit_testing/{_fix_use_id}") + yield _run + _run.delete() + _folder.delete(recursive=True, runs_only=False, delete_runs=True) + + def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.FixtureRequest, created_only: bool=False): fix_use_id: str = str(uuid.uuid4()).split('-', 1)[0] TEST_DATA = { @@ -107,7 +129,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur run.config(suppress_errors=False) run.init( - name=f"test_run_{TEST_DATA['metadata']['test_identifier']}", + name=f"test_run_{TEST_DATA['metadata']['test_identifier']}_{uuid.uuid4()}", tags=TEST_DATA["tags"], folder=TEST_DATA["folder"], visibility="tenant" if os.environ.get("CI") else None, @@ -120,37 +142,45 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur if run._dispatcher: run._dispatcher._max_buffer_size = MAX_BUFFER_SIZE + _alert_ids = [] + if create_objects: for i 
in range(5): run.log_event(f"{TEST_DATA['event_contains']} {i}") TEST_DATA['created_alerts'] = [] - for i in range(5): - run.create_alert(name=f"test_alert/alert_{i}", source="events", frequency=1, pattern=TEST_DATA['event_contains']) - TEST_DATA['created_alerts'].append(f"test_alert/alert_{i}") - run.create_alert( - name='test_alert/value_below_1', - source='metrics', + for i in range(5): + _aid = run.create_event_alert( + name=f"test_alert/alert_{i}/{fix_use_id}", + frequency=1, + pattern=TEST_DATA['event_contains'] + ) + TEST_DATA['created_alerts'].append(f"test_alert/alert_{i}/{fix_use_id}") + _alert_ids.append(_aid) + + _ta_id = run.create_metric_threshold_alert( + name=f'test_alert/value_below_1/{fix_use_id}', frequency=1, rule='is below', threshold=1, metric='metric_counter', window=2 ) - run.create_alert( - name='test_alert/value_above_1', - source='metrics', + _mr_id = run.create_metric_range_alert( + name=f'test_alert/value_within_1/{fix_use_id}', frequency=1, - rule='is above', - threshold=1, + rule = "is inside range", + range_low = 2, + range_high = 5, metric='metric_counter', window=2 ) + _alert_ids += [_ta_id, _mr_id] TEST_DATA['created_alerts'] += [ - "test_alert/value_above_1", - "test_alert/value_below_1" + f"test_alert/value_below_1/{fix_use_id}", + f"test_alert/value_within_1/{fix_use_id}" ] for i in range(5): @@ -160,6 +190,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur if create_objects: TEST_DATA["metrics"] = ("metric_counter", "metric_val") + TEST_DATA["run_id"] = run._id TEST_DATA["run_name"] = run._name TEST_DATA["url"] = run._user_config.server.url @@ -183,9 +214,21 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur out_f.write( "print('Hello World!')" ) - run.save_file(test_script, category="code", name="test_empty_file") - TEST_DATA["file_3"] = "test_empty_file" + run.save_file(test_script, category="code", name="test_code_upload") + TEST_DATA["file_3"] = 
"test_code_upload" + + TEST_DATA["alert_ids"] = _alert_ids - time.sleep(1.) return TEST_DATA + +@pytest.fixture +def offline_test() -> pathlib.Path: + with tempfile.TemporaryDirectory() as tempd: + _tempdir = pathlib.Path(tempd) + _cache_dir = _tempdir.joinpath(".simvue") + _cache_dir.mkdir(exist_ok=True) + os.environ["SIMVUE_OFFLINE_DIRECTORY"] = f"{_cache_dir}" + assert sv_cfg.SimvueConfiguration.fetch().offline.cache == _cache_dir + yield _tempdir + diff --git a/tests/example_data/Project.toml b/tests/example_data/Project.toml new file mode 100644 index 00000000..eeaae7c5 --- /dev/null +++ b/tests/example_data/Project.toml @@ -0,0 +1,19 @@ +name = "Julia Demo Project" +uuid = "31b09h27-d3fd-4268-8c4f-7ab0a8rbc582" +authors = ["Joe Bloggs =0.3.7", - "requests>=2.31.0", - "msgpack>=1.0.7", - "tenacity>=8.2.3,<10.0.0", - "PyJWT>=2.8.0", - "psutil>=5.9.8", - "pydantic>=2.5.3", - "pandas>=2.2.0", - "plotly>=5.18.0", - "numpy>=1.26.3,<3.0.0", - "matplotlib>=3.8.2", - "typing_extensions>=4.11.0", - "toml>=0.10.2", - "click>=8.1.7", - "gitpython>=3.1.43", - "humanfriendly>=10.0", - "tabulate>=0.9.0", - "randomname>=0.2.1", -] diff --git a/tests/example_data/python_poetry/poetry.lock b/tests/example_data/python_poetry/poetry.lock new file mode 100644 index 00000000..28836471 --- /dev/null +++ b/tests/example_data/python_poetry/poetry.lock @@ -0,0 +1,376 @@ +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. + +[[package]] +name = "certifi" +version = "2024.12.14" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash 
= "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + 
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = 
"sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "numpy" +version = "2.2.0" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, + {file = 
"numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, + {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, + {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, + {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, + {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, + {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, + {file 
= "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, + {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, + {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, + {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = 
"sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, + {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, + {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, +] + +[[package]] +name = "pandas" +version = "2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""} +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", 
"html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false 
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = 
">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.13" +content-hash = "fabc966f49e79f5fc297c4ba1778f211db403b4125ea6224ada97c8664f88ea3" diff --git a/tests/example_data/python_poetry/pyproject.toml b/tests/example_data/python_poetry/pyproject.toml new file mode 100644 index 00000000..6fc8a220 --- /dev/null +++ b/tests/example_data/python_poetry/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "example-repo" +version = "0.1.0" +description = "" +authors = ["Kristian Zarębski "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.13" +numpy = "^2.2.0" +pandas = "^2.2.3" +requests = "^2.32.3" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/tests/example_data/python_uv/pyproject.toml b/tests/example_data/python_uv/pyproject.toml new file mode 100644 index 00000000..124f9f0c --- /dev/null +++ b/tests/example_data/python_uv/pyproject.toml @@ -0,0 +1,11 @@ +[project] +name = "example-repo" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.13" +dependencies = [ + 
"numpy>=2.2.0", + "pandas>=2.2.3", + "requests>=2.32.3", +] diff --git a/tests/example_data/python_uv/uv.lock b/tests/example_data/python_uv/uv.lock new file mode 100644 index 00000000..5c947580 --- /dev/null +++ b/tests/example_data/python_uv/uv.lock @@ -0,0 +1,179 @@ +version = 1 +requires-python = ">=3.13" + +[[package]] +name = "certifi" +version = "2024.12.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = 
"https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", 
hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", 
size = 49446 }, +] + +[[package]] +name = "example-repo" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "numpy" }, + { name = "pandas" }, + { name = "requests" }, +] + +[package.metadata] +requires-dist = [ + { name = "numpy", specifier = ">=2.2.0" }, + { name = "pandas", specifier = ">=2.2.3" }, + { name = "requests", specifier = ">=2.32.3" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "numpy" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/1b/1d565e0f6e156e1522ab564176b8b29d71e13d8caf003a08768df3d5cec5/numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0", size = 20225497 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/4c/0d1eef206545c994289e7a9de21b642880a11e0ed47a2b0c407c688c4f69/numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367", size = 20895707 }, + { url = "https://files.pythonhosted.org/packages/16/cb/88f6c1e6df83002c421d5f854ccf134aa088aa997af786a5dac3f32ec99b/numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae", size = 14110592 }, + { url = 
"https://files.pythonhosted.org/packages/b4/54/817e6894168a43f33dca74199ba0dd0f1acd99aa6323ed6d323d63d640a2/numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69", size = 5110858 }, + { url = "https://files.pythonhosted.org/packages/c7/99/00d8a1a8eb70425bba7880257ed73fed08d3e8d05da4202fb6b9a81d5ee4/numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13", size = 6645143 }, + { url = "https://files.pythonhosted.org/packages/34/86/5b9c2b7c56e7a9d9297a0a4be0b8433f498eba52a8f5892d9132b0f64627/numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671", size = 14042812 }, + { url = "https://files.pythonhosted.org/packages/df/54/13535f74391dbe5f479ceed96f1403267be302c840040700d4fd66688089/numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571", size = 16093419 }, + { url = "https://files.pythonhosted.org/packages/dd/37/dfb2056842ac61315f225aa56f455da369f5223e4c5a38b91d20da1b628b/numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d", size = 15238969 }, + { url = "https://files.pythonhosted.org/packages/5a/3d/d20d24ee313992f0b7e7b9d9eef642d9b545d39d5b91c4a2cc8c98776328/numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742", size = 17855705 }, + { url = "https://files.pythonhosted.org/packages/5b/40/944c9ee264f875a2db6f79380944fd2b5bb9d712bb4a134d11f45ad5b693/numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e", size = 6270078 }, + { url = 
"https://files.pythonhosted.org/packages/30/04/e1ee6f8b22034302d4c5c24e15782bdedf76d90b90f3874ed0b48525def0/numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2", size = 12605791 }, + { url = "https://files.pythonhosted.org/packages/ef/fb/51d458625cd6134d60ac15180ae50995d7d21b0f2f92a6286ae7b0792d19/numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95", size = 20920160 }, + { url = "https://files.pythonhosted.org/packages/b4/34/162ae0c5d2536ea4be98c813b5161c980f0443cd5765fde16ddfe3450140/numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c", size = 14119064 }, + { url = "https://files.pythonhosted.org/packages/17/6c/4195dd0e1c41c55f466d516e17e9e28510f32af76d23061ea3da67438e3c/numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca", size = 5152778 }, + { url = "https://files.pythonhosted.org/packages/2f/47/ea804ae525832c8d05ed85b560dfd242d34e4bb0962bc269ccaa720fb934/numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d", size = 6667605 }, + { url = "https://files.pythonhosted.org/packages/76/99/34d20e50b3d894bb16b5374bfbee399ab8ff3a33bf1e1f0b8acfe7bbd70d/numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529", size = 14013275 }, + { url = "https://files.pythonhosted.org/packages/69/8f/a1df7bd02d434ab82539517d1b98028985700cfc4300bc5496fb140ca648/numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3", size = 16074900 }, + { url = 
"https://files.pythonhosted.org/packages/04/94/b419e7a76bf21a00fcb03c613583f10e389fdc8dfe420412ff5710c8ad3d/numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab", size = 15219122 }, + { url = "https://files.pythonhosted.org/packages/65/d9/dddf398b2b6c5d750892a207a469c2854a8db0f033edaf72103af8cf05aa/numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72", size = 17851668 }, + { url = "https://files.pythonhosted.org/packages/d4/dc/09a4e5819a9782a213c0eb4eecacdc1cd75ad8dac99279b04cfccb7eeb0a/numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066", size = 6325288 }, + { url = "https://files.pythonhosted.org/packages/ce/e1/e0d06ec34036c92b43aef206efe99a5f5f04e12c776eab82a36e00c40afc/numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881", size = 12692303 }, +] + +[[package]] +name = "pandas" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 }, + { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 }, + { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 }, + { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 }, + { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 }, + { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 }, + { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 }, + { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 }, + { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 }, + { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 }, + { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 }, + { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 }, + { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "tzdata" +version = "2024.2" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 }, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, +] diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index c8a7ba72..3b6c6f24 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -1,15 +1,17 @@ from logging import critical -from numpy import tri import pytest import uuid import random import os.path import typing import glob +import pathlib import time import tempfile import simvue.client as svc +from simvue.exception import ObjectNotFoundError import simvue.run as sv_run +import simvue.api.objects as sv_api_obj @pytest.mark.dependency @@ -32,12 +34,13 @@ def test_get_alerts(create_test_run: tuple[sv_run.Run, dict], from_run: bool) -> triggered_alerts_full = client.get_alerts(run_id=create_test_run[1]["run_id"], critical_only=False, names_only=False) assert len(triggered_alerts_full) == 7 for alert in 
triggered_alerts_full: - if alert["alert"].get("name") == "value_above_1": + if alert.name == "value_above_1": assert alert["alert"]["status"]["current"] == "critical" else: assert (triggered_alerts_full := client.get_alerts(names_only=True, critical_only=False)) - print(triggered_alerts_full, run_data["created_alerts"]) - assert all(a in triggered_alerts_full for a in run_data['created_alerts']) + + for alert in run_data["created_alerts"]: + assert alert in triggered_alerts_full, f"Alert '{alert}' was not triggered" @pytest.mark.dependency @@ -52,9 +55,17 @@ def test_get_run_id_from_name(create_test_run: tuple[sv_run.Run, dict]) -> None: @pytest.mark.dependency @pytest.mark.client -@pytest.mark.parametrize("aggregate", (True, False), ids=("aggregate", "complete")) +@pytest.mark.parametrize( + "aggregate,use_name_labels", + [ + (True, False), + (False, False), + (False, True) + ], + ids=("aggregate", "complete_ids", "complete_labels") +) def test_get_metric_values( - create_test_run: tuple[sv_run.Run, dict], aggregate: bool + create_test_run: tuple[sv_run.Run, dict], aggregate: bool, use_name_labels: bool ) -> None: client = svc.Client() time.sleep(0.5) @@ -62,6 +73,7 @@ def test_get_metric_values( run_ids=[create_test_run[1]["run_id"]], metric_names=[create_test_run[1]["metrics"][0]], xaxis="step", + use_run_names=use_name_labels, aggregate=aggregate, output_format="dict", ) @@ -70,12 +82,12 @@ def test_get_metric_values( _first_entry: dict = next(iter(_metrics_dict.values())) assert create_test_run[1]["metrics"][0] in _metrics_dict.keys() if aggregate: - _value_types = set(i[1] for i in _first_entry.keys()) + _value_types = {i[1] for i in _first_entry} assert all( i in _value_types for i in ("average", "min", "max") ), f"Expected ('average', 'min', 'max') in {_value_types}" - else: - _runs = set(i[1] for i in _first_entry.keys()) + elif not use_name_labels: + _runs = {i[1] for i in _first_entry} assert create_test_run[1]["run_id"] in _runs @@ -97,9 +109,9 @@ def 
test_plot_metrics(create_test_run: tuple[sv_run.Run, dict]) -> None: @pytest.mark.dependency @pytest.mark.client -def test_get_artifacts(create_test_run: tuple[sv_run.Run, dict]) -> None: +def test_get_artifacts_entries(create_test_run: tuple[sv_run.Run, dict]) -> None: client = svc.Client() - assert client.list_artifacts(create_test_run[1]["run_id"]) + assert dict(client.list_artifacts(create_test_run[1]["run_id"])) assert client.get_artifact(create_test_run[1]["run_id"], name="test_attributes") @@ -111,14 +123,13 @@ def test_get_artifact_as_file( ) -> None: with tempfile.TemporaryDirectory() as tempd: client = svc.Client() + _file_name = create_test_run[1][f"file_{file_id}"] client.get_artifact_as_file( create_test_run[1]["run_id"], - name=create_test_run[1][f"file_{file_id}"], - path=tempd, + name=_file_name, + output_dir=tempd, ) - assert create_test_run[1][f"file_{file_id}"] in [ - os.path.basename(i) for i in glob.glob(os.path.join(tempd, "*")) - ] + assert pathlib.Path(tempd).joinpath(_file_name).exists(), f"Failed to download '{_file_name}'" @pytest.mark.dependency @@ -131,7 +142,7 @@ def test_get_artifacts_as_files( with tempfile.TemporaryDirectory() as tempd: client = svc.Client() client.get_artifacts_as_files( - create_test_run[1]["run_id"], category=category, path=tempd + create_test_run[1]["run_id"], category=category, output_dir=tempd ) files = [os.path.basename(i) for i in glob.glob(os.path.join(tempd, "*"))] if not category or category == "input": @@ -144,9 +155,16 @@ def test_get_artifacts_as_files( @pytest.mark.dependency @pytest.mark.client -def test_get_runs(create_test_run: tuple[sv_run.Run, dict]) -> None: +@pytest.mark.parametrize("output_format", ("dict", "dataframe", "objects")) +def test_get_runs(create_test_run: tuple[sv_run.Run, dict], output_format: str) -> None: client = svc.Client() - assert client.get_runs(filters=None) + + _result = client.get_runs(filters=None, output_format=output_format, count_limit=10) + + if output_format == 
"dataframe": + assert not _result.empty + else: + assert _result @pytest.mark.dependency @@ -161,8 +179,9 @@ def test_get_run(create_test_run: tuple[sv_run.Run, dict]) -> None: def test_get_folder(create_test_run: tuple[sv_run.Run, dict]) -> None: client = svc.Client() assert (folders := client.get_folders()) - assert (folder_id := folders[1].get("path")) - assert client.get_folder(folder_id) + _id, _folder = next(folders) + assert _folder.path + assert client.get_folder(_folder.path) @pytest.mark.dependency @@ -170,8 +189,7 @@ def test_get_folder(create_test_run: tuple[sv_run.Run, dict]) -> None: def test_get_metrics_names(create_test_run: tuple[sv_run.Run, dict]) -> None: client = svc.Client() time.sleep(1) - assert client.get_metrics_names(create_test_run[1]["run_id"]) - + assert list(client.get_metrics_names(create_test_run[1]["run_id"])) @pytest.mark.dependency @@ -180,7 +198,7 @@ def test_get_tag(create_plain_run: tuple[sv_run.Run, dict]) -> None: _, run_data = create_plain_run client = svc.Client() time.sleep(1.0) - assert any(tag["name"] == run_data["tags"][-1] for tag in client.get_tags()) + assert any(tag.name == run_data["tags"][-1] for _, tag in client.get_tags()) PRE_DELETION_TESTS: list[str] = [ @@ -225,7 +243,7 @@ def test_get_tags(create_plain_run: tuple[sv_run.Run, dict]) -> None: run.close() time.sleep(1.0) client = svc.Client() - retrieved = [t["name"] for t in client.get_tags()] + retrieved = [t.name for _, t in client.get_tags()] assert all(t in retrieved for t in tags) @@ -244,13 +262,13 @@ def test_folder_deletion(create_test_run: tuple[sv_run.Run, dict]) -> None: def test_run_folder_metadata_find(create_plain_run: tuple[sv_run.Run, dict]) -> None: run, run_data = create_plain_run rand_val = random.randint(0, 1000) - run.set_folder_details(path=run_data["folder"], metadata={'atest': rand_val}) + run.set_folder_details(metadata={'atest': rand_val}) run.close() time.sleep(1.0) client = svc.Client() data = 
client.get_folders(filters=[f'metadata.atest == {rand_val}']) - assert run_data["folder"] in [i["path"] for i in data] + assert run_data["folder"] in [i.path for _, i in data] @pytest.mark.client @@ -264,7 +282,7 @@ def test_tag_deletion(create_plain_run: tuple[sv_run.Run, dict]) -> None: tags = client.get_tags() client.delete_run(run.id) time.sleep(1.0) - tag_identifier = [tag["id"] for tag in tags if tag["name"] == f"delete_me_{unique_id}"][0] + tag_identifier = [identifier for identifier, tag in tags if tag.name == f"delete_me_{unique_id}"][0] client.delete_tag(tag_identifier) time.sleep(1.0) assert not client.get_tag(tag_identifier) @@ -306,3 +324,34 @@ def test_multiple_metric_retrieval( aggregate=aggregate, output_format=output_format, ) + + +@pytest.mark.client +def test_alert_deletion() -> None: + _alert = sv_api_obj.UserAlert.new(name="test_alert", notification="none", description=None) + _alert.commit() + _client = svc.Client() + time.sleep(1) + _client.delete_alert(alert_id=_alert.id) + + with pytest.raises(ObjectNotFoundError) as e: + sv_api_obj.Alert(identifier=_alert.id) + + +@pytest.mark.client +def test_abort_run() -> None: + _uuid = f"{uuid.uuid4()}".split("-")[0] + _folder = sv_api_obj.Folder.new(path=f"/simvue_unit_testing/{_uuid}") + _run = sv_api_obj.Run.new(folder=f"/simvue_unit_testing/{_uuid}") + _run.status = "running" + _folder.commit() + _run.commit() + time.sleep(1) + _client = svc.Client() + _client.abort_run(_run.id, reason="Test abort") + time.sleep(1) + assert _run.abort_trigger + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + + diff --git a/tests/functional/test_config.py b/tests/functional/test_config.py index ce889500..29534e5e 100644 --- a/tests/functional/test_config.py +++ b/tests/functional/test_config.py @@ -92,10 +92,7 @@ def test_config_setup( def _mocked_find(file_names: list[str], *_, ppt_file=_ppt_file, conf_file=_config_file, **__) -> str: - if "pyproject.toml" in file_names: - 
return ppt_file - else: - return conf_file + return ppt_file if "pyproject.toml" in file_names else conf_file mocker.patch("simvue.config.user.sv_util.find_first_instance_of_file", _mocked_find) @@ -117,15 +114,15 @@ def _mocked_find(file_names: list[str], *_, ppt_file=_ppt_file, conf_file=_confi assert _config.config_file() == _config_file if use_env: - assert _config.server.url == _other_url - assert _config.server.token == _other_token + assert _config.server.url == f"{_other_url}api" + assert _config.server.token.get_secret_value() == _other_token elif use_args: - assert _config.server.url == _arg_url - assert _config.server.token == _arg_token + assert _config.server.url == f"{_arg_url}api" + assert _config.server.token.get_secret_value() == _arg_token elif use_file and use_file != "pyproject.toml": - assert _config.server.url == _url - assert _config.server.token == _token - assert _config.offline.cache == temp_d + assert _config.server.url == f"{_url}api" + assert _config.server.token.get_secret_value() == _token + assert f"{_config.offline.cache}" == temp_d if use_file == "extended": assert _config.run.description == _description diff --git a/tests/functional/test_dispatch.py b/tests/functional/test_dispatch.py index 2ac15cbb..de77ccb2 100644 --- a/tests/functional/test_dispatch.py +++ b/tests/functional/test_dispatch.py @@ -24,9 +24,9 @@ @pytest.mark.parametrize("multiple", (True, False), ids=("multiple", "single")) def test_queued_dispatcher(overload_buffer: bool, multiple: bool, append_during_dispatch: bool) -> None: buffer_size: int = 10 - n_elements: int = buffer_size - 1 if not overload_buffer else 2 * buffer_size + n_elements: int = 2 * buffer_size if overload_buffer else buffer_size - 1 max_read_rate: float = 0.2 - time_threshold: float = 1 if not overload_buffer else 1 + (1 / max_read_rate) + time_threshold: float = 1 + (1 / max_read_rate) if overload_buffer else 1 start_time = time.time() @@ -80,7 +80,7 @@ def 
test_nested_queued_dispatch(multi_queue: bool) -> None: buffer_size: int = 10 n_elements: int = 2 * buffer_size max_read_rate: float = 0.2 - variable: str | list[str] = "demo" if not multi_queue else ["events", "metrics"] + variable: str | list[str] = ["events", "metrics"] if multi_queue else "demo" result_queue = Queue() @@ -111,7 +111,7 @@ def _main(res_queue, index, dispatch_callback=create_callback, term_event=event, dispatcher.add_item({string.ascii_uppercase[i % 26]: i}, var, False) except(RuntimeError): res_queue.put("AARGHGHGHGHAHSHGHSDHFSEDHSE") - + time.sleep(0.1) while not dispatcher.empty: diff --git a/tests/functional/test_metadata.py b/tests/functional/test_metadata.py deleted file mode 100644 index 9e095c19..00000000 --- a/tests/functional/test_metadata.py +++ /dev/null @@ -1,18 +0,0 @@ -import pytest -import pathlib -import re -import simvue.metadata as sv_meta - - -@pytest.mark.metadata -def test_cargo_env() -> None: - metadata = sv_meta._rust_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) - assert metadata["rust.environment.serde"] == "1.0.123" - assert metadata["rust.project.name"] == "example_project" - -@pytest.mark.metadata -def test_python_env() -> None: - metadata = sv_meta._python_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) - assert re.findall(r"\d+\.\d+\.\d+", metadata["python.environment.click"]) - assert metadata["python.project.name"] == "spam-eggs" - diff --git a/tests/functional/test_run_artifact_upload.py b/tests/functional/test_run_artifact_upload.py new file mode 100644 index 00000000..b870e0c7 --- /dev/null +++ b/tests/functional/test_run_artifact_upload.py @@ -0,0 +1,40 @@ +""" +Low Level API: Run Class +""" + +import uuid +import pathlib +import tempfile +import pytest + +from simvue.api.objects import Run, FileArtifact, storage +from simvue.api.objects.folder import Folder + + +@pytest.mark.api +def test_add_artifact_to_run() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder 
= Folder.new(path=f"/simvue_unit_testing/{_uuid}") + _folder.commit() + _run = Run.new(folder=f"/simvue_unit_testing/{_uuid}") + _run.status = "running" + _run.commit() + + with tempfile.NamedTemporaryFile() as tempf: + with open(tempf.name, "w") as in_f: + in_f.write("Hello") + + _artifact = FileArtifact.new( + name=f"test_{_uuid}", + storage=None, + file_path=pathlib.Path(tempf.name), + mime_type=None, + metadata=None + ) + _artifact.attach_to_run(_run.id, "input") + _run.status = "completed" + _run.commit() + assert _run.artifacts + _run.delete() + _folder.delete(recursive=True, delete_runs=True) + diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index db6912a6..5de51a8a 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -1,5 +1,6 @@ import os from os.path import basename +from numpy import identity import pytest import pytest_mock import time @@ -15,10 +16,14 @@ import random import simvue +from simvue.api.objects.alert.fetch import Alert +from simvue.exception import SimvueRunError import simvue.run as sv_run import simvue.client as sv_cl import simvue.sender as sv_send +from simvue.api.objects import Run as RunObject + if typing.TYPE_CHECKING: from .conftest import CountingLogHandler @@ -26,7 +31,9 @@ @pytest.mark.run def test_created_run() -> None: with sv_run.Run() as run_created: - run_created.init(running=False) + run_created.init(running=False, retention_period="1 min") + _run = RunObject(identifier=run_created.id) + assert _run.status == "created" @pytest.mark.run @@ -40,6 +47,17 @@ def test_check_run_initialised_decorator() -> None: assert "Simvue Run must be initialised" in str(e.value) +@pytest.mark.run +@pytest.mark.codecarbon +def test_run_with_emissions() -> None: + with sv_run.Run() as run_created: + run_created.init(retention_period="1 min") + run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) + time.sleep(5) + _run = 
RunObject(identifier=run_created.id) + assert list(_run.metrics) + + @pytest.mark.run @pytest.mark.parametrize("overload_buffer", (True, False), ids=("overload", "normal")) @pytest.mark.parametrize( @@ -50,7 +68,7 @@ def test_log_metrics( setup_logging: "CountingLogHandler", mocker, request: pytest.FixtureRequest, - visibility: typing.Union[typing.Literal["public", "tenant"], list[str], None], + visibility: typing.Literal["public", "tenant"] | list[str] | None, ) -> None: METRICS = {"a": 10, "b": 1.2} @@ -62,7 +80,7 @@ def test_log_metrics( run.config(suppress_errors=False) if visibility == "bad_option": - with pytest.raises(RuntimeError): + with pytest.raises(SimvueRunError, match="visibility") as e: run.init( name=f"test_run_{str(uuid.uuid4()).split('-', 1)[0]}", tags=[ @@ -94,10 +112,10 @@ def test_log_metrics( if overload_buffer: for i in range(run._dispatcher._max_buffer_size * 3): - run.log_metrics({key: i for key in METRICS.keys()}) + run.log_metrics({key: i for key in METRICS}) else: run.log_metrics(METRICS) - time.sleep(1.0 if not overload_buffer else 2.0) + time.sleep(2.0 if overload_buffer else 1.0) run.close() client = sv_cl.Client() _data = client.get_metric_values( @@ -108,37 +126,40 @@ def test_log_metrics( ) with contextlib.suppress(RuntimeError): - client.delete_run(run._id) + client.delete_run(run._id) + + assert _data assert sorted(set(METRICS.keys())) == sorted(set(_data.keys())) _steps = [] for entry in _data.values(): - _steps += list(i[0] for i in entry.keys()) + _steps += [i[0] for i in entry.keys()] _steps = set(_steps) + assert ( - len(_steps) == 1 - if not overload_buffer - else run._dispatcher._max_buffer_size * 3 + len(_steps) == (run._dispatcher._max_buffer_size * 3 if overload_buffer else 1) ) - - # Check metrics have been set - assert setup_logging.counts[0] == 1 if not overload_buffer else 3 + # There are two debug log messages per metric dispatch - 'Executing callback on buffer' and 'Posting staged data' + # Should have done one 
dispatch if not overloaded, and 3 dispatches if overloaded + assert setup_logging.counts[0] == (6 if overload_buffer else 2) # Check heartbeat has been called at least once (so sysinfo sent) assert setup_logging.counts[1] > 0 @pytest.mark.run +@pytest.mark.offline def test_log_metrics_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: METRICS = {"a": 10, "b": 1.2, "c": 2} run, _ = create_plain_run_offline + run_name = run._name run.log_metrics(METRICS) - run_id, *_ = sv_send.sender() - time.sleep(1.0) + time.sleep(1) + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) run.close() client = sv_cl.Client() _data = client.get_metric_values( - run_ids=[run_id], + run_ids=[client.get_run_id_from_name(run_name)], metric_names=list(METRICS.keys()), xaxis="step", aggregate=False, @@ -146,7 +167,7 @@ def test_log_metrics_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) assert sorted(set(METRICS.keys())) == sorted(set(_data.keys())) _steps = [] for entry in _data.values(): - _steps += list(i[0] for i in entry.keys()) + _steps += [i[0] for i in entry.keys()] _steps = set(_steps) assert ( len(_steps) == 1 @@ -154,7 +175,7 @@ def test_log_metrics_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) @pytest.mark.run -def test_log_events(create_test_run: tuple[sv_run.Run, dict]) -> None: +def test_log_events_online(create_test_run: tuple[sv_run.Run, dict]) -> None: EVENT_MSG = "Hello world!" run, _ = create_test_run run.log_event(EVENT_MSG) @@ -165,29 +186,35 @@ def test_log_events(create_test_run: tuple[sv_run.Run, dict]) -> None: assert event_data[0].get("message", EVENT_MSG) - @pytest.mark.run +@pytest.mark.offline def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: EVENT_MSG = "Hello offline world!" 
run, _ = create_plain_run_offline + run_name = run._name run.log_event(EVENT_MSG) - run_id, *_ = sv_send.sender() + time.sleep(1) + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) run.close() - time.sleep(1.0) client = sv_cl.Client() - event_data = client.get_events(run_id, count_limit=1) + event_data = client.get_events(client.get_run_id_from_name(run_name), count_limit=1) assert event_data[0].get("message", EVENT_MSG) @pytest.mark.run +@pytest.mark.offline def test_offline_tags(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: run, run_data = create_plain_run_offline - run_id, *_ = sv_send.sender() - run.close() time.sleep(1.0) + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) + run.close() client = sv_cl.Client() tags = client.get_tags() - assert run_data["tags"][-1] in [tag["name"] for tag in tags] + + # Find tag + run_tags = [tag for tag in tags if tag[1].name == run_data["tags"][-1]] + assert len(run_tags) == 1 + client.delete_tag(run_tags[0][0]) @@ -202,7 +229,7 @@ def test_update_metadata_running(create_test_run: tuple[sv_run.Run, dict]) -> No run_info = client.get_run(run.id) for key, value in METADATA.items(): - assert run_info.get("metadata", {}).get(key) == value + assert run_info.metadata.get(key) == value @pytest.mark.run @@ -215,24 +242,26 @@ def test_update_metadata_created(create_pending_run: tuple[sv_run.Run, dict]) -> run_info = client.get_run(run.id) for key, value in METADATA.items(): - assert run_info.get("metadata", {}).get(key) == value + assert run_info.metadata.get(key) == value @pytest.mark.run +@pytest.mark.offline def test_update_metadata_offline( create_plain_run_offline: tuple[sv_run.Run, dict], ) -> None: METADATA = {"a": 10, "b": 1.2, "c": "word"} run, _ = create_plain_run_offline + run_name = run._name run.update_metadata(METADATA) - run_id, *_ = sv_send.sender() + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) run.close() time.sleep(1.0) client = sv_cl.Client() - run_info = 
client.get_run(run_id) + run_info = client.get_run(client.get_run_id_from_name(run_name)) for key, value in METADATA.items(): - assert run_info.get("metadata", {}).get(key) == value + assert run_info.metadata.get(key) == value @pytest.mark.run @@ -434,29 +463,34 @@ def test_set_folder_details(request: pytest.FixtureRequest) -> None: request.node.name.replace("[", "_").replace("]", "_"), ] run.init(folder=folder_name) - run.set_folder_details(path=folder_name, tags=tags, description=description) + run.set_folder_details(tags=tags, description=description) client = sv_cl.Client() - assert sorted((folder := client.get_folders(filters=[f"path == {folder_name}"])[0])["tags"]) == sorted(tags) - assert folder["description"] == description + _folder = client.get_folder(folder_path=folder_name) + + assert _folder.tags + assert sorted(_folder.tags) == sorted(tags) + + assert _folder.description == description @pytest.mark.run @pytest.mark.parametrize( - "valid_mimetype", (True, False), ids=("valid_mime", "invalid_mime") -) -@pytest.mark.parametrize( - "preserve_path", (True, False), ids=("preserve_path", "modified_path") + "valid_mimetype,preserve_path,name,allow_pickle,empty_file,category", + [ + (True, False, None, False, False, "input"), + (False, True, None, False, False, "output"), + (False, False, "test_file", False, False, "code"), + (False, False, None, True, False, "input"), + (False, False, None, False, True, "code") + ], + ids=[f"scenario_{i}" for i in range(1, 6)] ) -@pytest.mark.parametrize("name", ("test_file", None), ids=("named", "nameless")) -@pytest.mark.parametrize("allow_pickle", (True, False), ids=("pickled", "unpickled")) -@pytest.mark.parametrize("empty_file", (True, False), ids=("empty", "content")) -@pytest.mark.parametrize("category", ("input", "output", "code")) def test_save_file_online( create_plain_run: typing.Tuple[sv_run.Run, dict], valid_mimetype: bool, preserve_path: bool, - name: typing.Optional[str], + name: str | None, allow_pickle: bool, 
empty_file: bool, category: typing.Literal["input", "output", "code"], @@ -469,13 +503,13 @@ def test_save_file_online( (out_name := pathlib.Path(tempd).joinpath("test_file.txt")), "w", ) as out_f: - out_f.write("test data entry" if not empty_file else "") + out_f.write("" if empty_file else "test data entry") if valid_mimetype: simvue_run.save_file( out_name, category=category, - filetype=file_type, + file_type=file_type, preserve_path=preserve_path, name=name, ) @@ -484,19 +518,12 @@ def test_save_file_online( simvue_run.save_file( out_name, category=category, - filetype=file_type, + file_type=file_type, preserve_path=preserve_path, ) return variable = capfd.readouterr() - with capfd.disabled(): - if empty_file: - assert ( - variable.out - == "[simvue] WARNING: saving zero-sized files not currently supported\n" - ) - return simvue_run.close() time.sleep(1.0) os.remove(out_name) @@ -509,23 +536,34 @@ def test_save_file_online( out_loc = pathlib.Path(tempd) stored_name = pathlib.Path(base_name) out_file = out_loc.joinpath(name or out_name.name) - client.get_artifact_as_file(run_id=simvue_run.id, name=f"{name or stored_name}", path=tempd) - assert out_loc.joinpath(name if name else out_name.name).exists() + client.get_artifact_as_file(run_id=simvue_run.id, name=f"{name or stored_name}", output_dir=tempd) + assert out_loc.joinpath(name or out_name.name).exists() @pytest.mark.run +@pytest.mark.offline @pytest.mark.parametrize( - "preserve_path", (True, False), ids=("preserve_path", "modified_path") + "preserve_path,name,allow_pickle,empty_file,category", + [ + (False, None, False, False, "input"), + (True, None, False, False, "output"), + (False, "test_file", False, False, "code"), + (False, None, True, False, "input"), + (False, None, False, True, "code") + ], + ids=[f"scenario_{i}" for i in range(1, 6)] ) -@pytest.mark.parametrize("name", ("retrieved_test_file", None), ids=("named", "nameless")) -@pytest.mark.parametrize("category", ("input", "output", "code")) def 
test_save_file_offline( - create_plain_run_offline: tuple[sv_run.Run, dict], + create_plain_run_offline: typing.Tuple[sv_run.Run, dict], preserve_path: bool, - name: typing.Optional[str], - category: typing.Literal["input", "output", "code"] + name: str | None, + allow_pickle: bool, + empty_file: bool, + category: typing.Literal["input", "output", "code"], + capfd, ) -> None: simvue_run, _ = create_plain_run_offline + run_name = simvue_run._name file_type: str = "text/plain" with tempfile.TemporaryDirectory() as tempd: with open( @@ -537,15 +575,22 @@ def test_save_file_offline( simvue_run.save_file( out_name, category=category, + file_type=file_type, preserve_path=preserve_path, name=name, ) - run_id, *_ = sv_send.sender() + + simvue_run.save_file( + out_name, + category=category, + preserve_path=preserve_path, + name=name, + ) + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) simvue_run.close() time.sleep(1.0) os.remove(out_name) client = sv_cl.Client() - assert run_id base_name = name or out_name.name if preserve_path: out_loc = pathlib.Path(tempd) / out_name.parent @@ -554,8 +599,8 @@ def test_save_file_offline( out_loc = pathlib.Path(tempd) stored_name = pathlib.Path(base_name) out_file = out_loc.joinpath(name or out_name.name) - client.get_artifact_as_file(run_id=run_id, name=f"{name or stored_name}", path=tempd) - assert out_loc.joinpath(name if name else out_name.name).exists() + client.get_artifact_as_file(run_id=client.get_run_id_from_name(run_name), name=f"{name or stored_name}", output_dir=tempd) + assert out_loc.joinpath(name or out_name.name).exists() @pytest.mark.run @@ -575,13 +620,13 @@ def test_update_tags_running( time.sleep(1) client = sv_cl.Client() run_data = client.get_run(simvue_run._id) - assert run_data["tags"] == tags + assert sorted(run_data.tags) == sorted(tags) simvue_run.update_tags(["additional"]) time.sleep(1) run_data = client.get_run(simvue_run._id) - assert sorted(run_data["tags"]) == sorted(tags + ["additional"]) + 
assert sorted(run_data.tags) == sorted(tags + ["additional"]) @pytest.mark.run @@ -601,13 +646,13 @@ def test_update_tags_created( time.sleep(1) client = sv_cl.Client() run_data = client.get_run(simvue_run._id) - assert sorted(run_data["tags"]) == sorted(tags) + assert sorted(run_data.tags) == sorted(tags) simvue_run.update_tags(["additional"]) time.sleep(1) run_data = client.get_run(simvue_run._id) - assert sorted(run_data["tags"]) == sorted(tags + ["additional"]) + assert sorted(run_data.tags) == sorted(tags + ["additional"]) @pytest.mark.run @@ -667,11 +712,11 @@ def abort_callback(abort_run=trigger) -> None: assert run._resources_metrics_interval == 1 for child in child_processes: assert not child.is_running() - if not run._status == "terminated": + if run._status != "terminated": run.kill_all_processes() raise AssertionError("Run was not terminated") assert trigger.is_set() - + @pytest.mark.run def test_abort_on_alert_python(create_plain_run: typing.Tuple[sv_run.Run, dict], mocker: pytest_mock.MockerFixture) -> None: @@ -707,13 +752,17 @@ def testing_exit(status: int) -> None: run.config(resources_metrics_interval=1) run._heartbeat_interval = 1 run._testing = True - alert_id = run.create_alert("abort_test", source="user", trigger_abort=True) + alert_id = run.create_user_alert("abort_test", trigger_abort=True) run.add_process(identifier="forever_long", executable="bash", c="sleep 10") time.sleep(2) run.log_alert(alert_id, "critical") + _alert = Alert(identifier=alert_id) + time.sleep(1) + assert _alert.get_status(run.id) == "critical" counter = 0 while run._status != "terminated" and counter < 15: time.sleep(1) + assert run._sv_obj.abort_trigger, "Abort trigger was not set" counter += 1 if counter >= 15: run.kill_all_processes() diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py index de16b8ac..6c071809 100644 --- a/tests/functional/test_run_execute_process.py +++ 
b/tests/functional/test_run_execute_process.py @@ -7,13 +7,17 @@ import simvue.sender as sv_send from simvue import Run, Client +from simvue.sender import sender @pytest.mark.executor +@pytest.mark.offline def test_monitor_processes(create_plain_run_offline: tuple[Run, dict]): + _run: Run _run, _ = create_plain_run_offline _run.add_process("process_1", "Hello world!", executable="echo", n=True) _run.add_process("process_2", "bash" if sys.platform != "win32" else "powershell", debug=True, c="exit 0") _run.add_process("process_3", "ls", "-ltr") + sender(_run._sv_obj._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"]) @pytest.mark.executor @@ -32,7 +36,7 @@ def test_abort_all_processes(create_plain_run: tuple[Run, dict]) -> None: _run.add_process(f"process_{i}", executable="python", script=temp_f.name) assert _run.executor.get_command(f"process_{i}") == f"python {temp_f.name}" - time.sleep(5) + time.sleep(3) _run.kill_all_processes() end_time = time.time() @@ -49,34 +53,33 @@ def test_processes_cwd(create_plain_run: dict[Run, dict]) -> None: """ run, _ = create_plain_run with tempfile.TemporaryDirectory() as temp_dir: - with tempfile.NamedTemporaryFile(dir=temp_dir, suffix=".py") as temp_file: - with open(temp_file.name, "w") as out_f: - out_f.writelines([ - "import os\n", - "f = open('new_file.txt', 'w')\n", - "f.write('Test Line')\n", - "f.close()" - ]) - - run_id = run.id - run.add_process( - identifier="sleep_10_process", - executable="python", - script=temp_file.name, - cwd=temp_dir - ) - time.sleep(1) - run.save_file(os.path.join(temp_dir, "new_file.txt"), 'output') - - client = Client() - - # Check that the script was uploaded to the run correctly - os.makedirs(os.path.join(temp_dir, "downloaded")) - client.get_artifact_as_file(run_id, os.path.basename(temp_file.name), path=os.path.join(temp_dir, "downloaded")) - assert filecmp.cmp(os.path.join(temp_dir, "downloaded", os.path.basename(temp_file.name)), temp_file.name) - - 
client.get_artifact_as_file(run_id, "new_file.txt", path=os.path.join(temp_dir, "downloaded")) - new_file = open(os.path.join(temp_dir, "downloaded", "new_file.txt"), "r") - assert new_file.read() == "Test Line" - new_file.close() + with tempfile.NamedTemporaryFile(dir=temp_dir, suffix=".py") as temp_file: + with open(temp_file.name, "w") as out_f: + out_f.writelines([ + "import os\n", + "f = open('new_file.txt', 'w')\n", + "f.write('Test Line')\n", + "f.close()" + ]) + + run_id = run.id + run.add_process( + identifier="sleep_10_process", + executable="python", + script=temp_file.name, + cwd=temp_dir + ) + time.sleep(1) + run.save_file(os.path.join(temp_dir, "new_file.txt"), 'output') + + client = Client() + + # Check that the script was uploaded to the run correctly + os.makedirs(os.path.join(temp_dir, "downloaded")) + client.get_artifact_as_file(run_id, os.path.basename(temp_file.name), output_dir=os.path.join(temp_dir, "downloaded")) + assert filecmp.cmp(os.path.join(temp_dir, "downloaded", os.path.basename(temp_file.name)), temp_file.name) + + client.get_artifact_as_file(run_id, "new_file.txt", output_dir=os.path.join(temp_dir, "downloaded")) + with open(os.path.join(temp_dir, "downloaded", "new_file.txt"), "r") as new_file: + assert new_file.read() == "Test Line" diff --git a/tests/functional/test_scenarios.py b/tests/functional/test_scenarios.py index c3d266c0..9b9983c5 100644 --- a/tests/functional/test_scenarios.py +++ b/tests/functional/test_scenarios.py @@ -59,7 +59,7 @@ def upload(name: str, values_per_run: int, shared_dict) -> None: def test_uploaded_data_immediately_accessible( values_per_run: int, processing: str, run_deleter ) -> None: - name = "Test-" + str(random.randint(0, 1000000000)) + name = f"Test-{random.randint(0, 1000000000)}" manager = Manager() shared_dict = manager.dict() diff --git a/tests/unit/test_conversion_to_dataframe.py b/tests/unit/test_conversion_to_dataframe.py index 205a4ea9..d68bb1a7 100644 --- 
a/tests/unit/test_conversion_to_dataframe.py +++ b/tests/unit/test_conversion_to_dataframe.py @@ -1,5 +1,8 @@ +import pytest +from numpy import exp from simvue.converters import to_dataframe +@pytest.mark.local def test_run_conversion_to_dataframe(): """ Check that runs can be successfully converted to a dataframe @@ -21,25 +24,29 @@ def test_run_conversion_to_dataframe(): runs_df = to_dataframe(runs) - assert(runs_df.columns.to_list() == ['name', - 'status', - 'folder', - 'created', - 'started', - 'ended', - 'metadata.a1', - 'metadata.b1', - 'metadata.a2', - 'metadata.b2']) + expected_columns = [ + 'name', + 'status', + 'folder', + 'created', + 'started', + 'ended', + 'metadata.a1', + 'metadata.b1', + 'metadata.a2', + 'metadata.b2' + ] + + assert sorted(runs_df.columns.to_list()) == sorted(expected_columns) data = runs_df.to_dict('records') - for i in range(0, len(runs)): + for i in range(len(runs)): assert(runs[i]['name'] == data[i]['name']) assert(runs[i]['folder'] == data[i]['folder']) assert(runs[i]['created'] == data[i]['created']) assert(runs[i]['started'] == data[i]['started']) assert(runs[i]['ended'] == data[i]['ended']) for item in runs[i]['metadata']: - index = 'metadata.%s' % item + index = f'metadata.{item}' assert(index in data[i]) assert(runs[i]['metadata'][item] == data[i][index]) diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py new file mode 100644 index 00000000..d55fd08c --- /dev/null +++ b/tests/unit/test_event_alert.py @@ -0,0 +1,169 @@ +import time +import json +import pytest +import contextlib +import uuid + +from simvue.api.objects import Alert, EventsAlert +from simvue.sender import sender + +@pytest.mark.api +@pytest.mark.online +def test_event_alert_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none", + description=None + ) + _alert.commit() + assert _alert.source == 
"events" + assert _alert.alert.frequency == 1 + assert _alert.alert.pattern == "completed" + assert _alert.name == f"events_alert_{_uuid}" + assert _alert.notification == "none" + assert _alert.to_dict() + _alert.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_event_alert_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none", + offline=True, + description=None + ) + + _alert.commit() + assert _alert.source == "events" + assert _alert.alert.frequency == 1 + assert _alert.alert.pattern == "completed" + assert _alert.name == f"events_alert_{_uuid}" + assert _alert.notification == "none" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("source") == "events" + assert _local_data.get("alert").get("frequency") == 1 + assert _local_data.get("alert").get("pattern") == "completed" + assert _local_data.get("name") == f"events_alert_{_uuid}" + assert _local_data.get("notification") == "none" + + _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_alert = Alert(_id_mapping.get(_alert.id)) + assert _online_alert.source == "events" + assert _online_alert.alert.frequency == 1 + assert _online_alert.alert.pattern == "completed" + assert _online_alert.name == f"events_alert_{_uuid}" + assert _online_alert.notification == "none" + + _alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() + +@pytest.mark.api +@pytest.mark.online +def test_event_alert_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none", + description=None + ) + _alert.commit() + 
time.sleep(1) + _new_alert = Alert(_alert.id) + _new_alert.read_only(False) + assert isinstance(_new_alert, EventsAlert) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" + _new_alert.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_event_alert_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none", + offline=True, + description=None + ) + _alert.commit() + _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_alert = Alert(_id_mapping.get(_alert.id)) + assert _online_alert.source == "events" + assert _online_alert.alert.frequency == 1 + assert _online_alert.alert.pattern == "completed" + assert _online_alert.name == f"events_alert_{_uuid}" + assert _online_alert.notification == "none" + + _new_alert = EventsAlert(_alert.id) + _new_alert.read_only(False) + _new_alert.description = "updated!" + _new_alert.commit() + + # Since changes havent been sent, check online run not updated + _online_alert.refresh() + assert _online_alert.description != "updated!" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("description") == "updated!" + + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + _online_alert.refresh() + assert _online_alert.description == "updated!" 
+ + _online_alert.read_only(False) + _online_alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() + + +@pytest.mark.api +@pytest.mark.online +def test_event_alert_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = EventsAlert.new( + name=f"events_alert_{_uuid}", + frequency=1, + pattern="completed", + notification="none", + description="event_alert prop alert" + ) + _alert.commit() + + _failed = [] + + for member in _alert._properties: + try: + getattr(_alert, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _alert.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) + diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py new file mode 100644 index 00000000..205643e6 --- /dev/null +++ b/tests/unit/test_events.py @@ -0,0 +1,32 @@ +import contextlib +import json +import pytest +import time +import datetime +import uuid + +from simvue.api.objects import Events, Folder, Run +from simvue.models import DATETIME_FORMAT + +@pytest.mark.api +@pytest.mark.online +def test_events_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() + _timestamp = datetime.datetime.now().strftime(DATETIME_FORMAT) + _events = Events.new( + run=_run.id, + events=[ + {"message": "This is a test!", "timestamp": _timestamp} + ], + ) + assert _events.to_dict() + _events.commit() + assert _events.get(run_id=_run.id) + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + diff --git a/tests/unit/test_file_artifact.py b/tests/unit/test_file_artifact.py new file mode 100644 index 00000000..f21b8ca1 --- /dev/null +++ b/tests/unit/test_file_artifact.py @@ 
-0,0 +1,92 @@ +import pytest +import uuid +import time +import pathlib +import tempfile +import json +from simvue.api.objects import FileArtifact, Run, Artifact +from simvue.api.objects.folder import Folder +from simvue.sender import sender +from simvue.client import Client + +@pytest.mark.api +@pytest.mark.online +def test_file_artifact_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() + + _failed = [] + + with tempfile.NamedTemporaryFile(suffix=".txt") as temp_f: + _path = pathlib.Path(temp_f.name) + with _path.open("w") as out_f: + out_f.write(f"Hello World! {_uuid}") + _artifact = FileArtifact.new( + name=f"test_file_artifact_{_uuid}", + file_path=_path, + storage=None, + mime_type=None, + metadata=None + ) + _artifact.attach_to_run(_run.id, "input") + time.sleep(1) + for member in _artifact._properties: + try: + getattr(_artifact, member) + except Exception as e: + _failed.append((member, f"{e}")) + assert _artifact.name == f"test_file_artifact_{_uuid}" + _content = b"".join(_artifact.download_content()).decode("UTF-8") + assert _content == f"Hello World! {_uuid}" + assert _artifact.to_dict() + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + if _failed: + raise AssertionError("\n\t-" + "\n\t- ".join(": ".join(i) for i in _failed)) + + +@pytest.mark.api +@pytest.mark.offline +def test_file_artifact_creation_offline(offline_test: pathlib.Path) -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(name=f"test_file_artifact_creation_offline_{_uuid}",folder=_folder_name, offline=True) + + _path = offline_test.joinpath("hello_world.txt") + + with _path.open("w") as out_f: + out_f.write(f"Hello World! 
{_uuid}") + + _folder.commit() + _run.commit() + _artifact = FileArtifact.new( + name=f"test_file_artifact_{_uuid}", + file_path=_path, + storage=None, + mime_type=None, + offline=True, + metadata=None + ) + _artifact.attach_to_run(_run._identifier, category="input") + + with _artifact._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("name") == f"test_file_artifact_{_uuid}" + assert _local_data.get("runs") == {_run._identifier: "input"} + + _id_mapping = sender(offline_test.joinpath(".simvue"), 1, 10) + time.sleep(1) + + _online_artifact = Artifact(_id_mapping[_artifact.id]) + assert _online_artifact.name == _artifact.name + _content = b"".join(_online_artifact.download_content()).decode("UTF-8") + assert _content == f"Hello World! {_uuid}" + _run.delete() + _folder.delete() + diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py new file mode 100644 index 00000000..46f053cd --- /dev/null +++ b/tests/unit/test_file_storage.py @@ -0,0 +1,49 @@ +import pytest +import time +import json +import uuid + +from simvue.api.objects import FileStorage +from simvue.sender import sender + +@pytest.mark.api +@pytest.mark.online +def test_create_file_storage_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _storage = FileStorage.new( + name=_uuid, disable_check=False, is_tenant_useable=False, is_default=False, is_enabled=False) + _storage.commit() + assert _storage.is_enabled == False + assert _storage.name == _uuid + assert _storage.is_default == False + + assert _storage.to_dict() + _storage.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_create_file_storage_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _storage = FileStorage.new(name=_uuid, disable_check=True, is_tenant_useable=False, is_default=False, offline=True, is_enabled=False) + + _storage.commit() + assert _storage.name == _uuid + assert _storage.is_enabled == False + assert _storage.is_default 
== False + + with _storage._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("name") == _uuid + assert _local_data.get("is_enabled") == False + assert _local_data.get("is_default") == False + + _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"]) + time.sleep(1) + _online_storage = FileStorage(_id_mapping.get(_storage.id)) + assert _online_storage.name == _uuid + assert _online_storage.is_enabled == False + assert _online_storage.is_default == False + + _online_storage.read_only(False) + _online_storage.delete() \ No newline at end of file diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py new file mode 100644 index 00000000..a029563c --- /dev/null +++ b/tests/unit/test_folder.py @@ -0,0 +1,139 @@ +import typing +import pytest +import uuid +import contextlib +import json +import time +import os + +from simvue.api.objects.folder import Folder +from simvue.sender import sender +from simvue.client import Client +@pytest.mark.api +@pytest.mark.online +def test_folder_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _path = f"/simvue_unit_testing/objects/folder/{_uuid}" + _folder = Folder.new(path=_path) + _folder.commit() + assert _folder.id + assert _folder.path == _path + _folders = dict(Folder.get(count=10)) + assert _folders + assert _folders[_folder.id] + assert _folders[_folder.id]._read_only + assert _folder.to_dict() + with pytest.raises(AssertionError): + _folders[_folder.id].name = "hello" + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + + +@pytest.mark.api +@pytest.mark.offline +def test_folder_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _path = f"/simvue_unit_testing/objects/folder/{_uuid}" + _folder = Folder.new(path=_path, offline=True) + _folder.commit() + + with _folder._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert 
_folder._local_staging_file.name.split(".")[0] == _folder.id + assert _local_data.get("path", None) == _path + + sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"]) + time.sleep(1) + client = Client() + + _folder_new = client.get_folder(_path) + assert _folder_new.path == _path + + _folder_new.delete() + + assert not _folder._local_staging_file.exists() + + +@pytest.mark.api +@pytest.mark.online +def test_folder_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _path = f"/simvue_unit_testing/objects/folder/{_uuid}" + _description = "Test study" + _tags = ["testing", "api"] + _folder = Folder.new(path=_path) + _folder.commit() + time.sleep(1) + _folder_new = Folder(identifier=_folder.id) + _folder_new.read_only(False) + _folder_new.tags = _tags + _folder_new.description = _description + _folder_new.commit() + assert _folder_new.tags == _tags + assert _folder.tags == _tags + assert _folder_new.description == _description + assert _folder.description == _description + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + + +@pytest.mark.api +@pytest.mark.offline +def test_folder_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _path = f"/simvue_unit_testing/objects/folder/{_uuid}" + _description = "Test study" + _tags = ["testing", "api"] + _folder = Folder.new(path=_path, offline=True) + _folder.commit() + + sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"]) + time.sleep(1) + + client = Client() + _folder_online = client.get_folder(_path) + assert _folder_online.path == _path + + _folder_new = Folder(identifier=_folder.id) + _folder_new.read_only(False) + _folder_new.tags = _tags + _folder_new.description = _description + _folder_new.commit() + + with _folder._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _folder._local_staging_file.name.split(".")[0] == _folder.id + assert _local_data.get("description", None) == _description + 
assert _local_data.get("tags", None) == _tags + + sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"]) + time.sleep(1) + + _folder_online.refresh() + assert _folder_online.path == _path + assert _folder_online.description == _description + assert _folder_online.tags == _tags + + _folder_online.read_only(False) + _folder_online.delete() + + +@pytest.mark.api +@pytest.mark.online +def test_folder_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _path = f"/simvue_unit_testing/objects/folder/{_uuid}" + _folder = Folder.new(path=_path) + _folder.commit() + + _failed = [] + + for member in _folder._properties: + try: + getattr(_folder, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _folder.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_matplotlib_figure_mime_type.py b/tests/unit/test_matplotlib_figure_mime_type.py index c0deaa2a..d984f676 100644 --- a/tests/unit/test_matplotlib_figure_mime_type.py +++ b/tests/unit/test_matplotlib_figure_mime_type.py @@ -8,7 +8,8 @@ plt = None @pytest.mark.skipif(not plt, reason="Matplotlib is not installed") -def test_matplotlib_figure_mime_type(): +@pytest.mark.local +def test_matplotlib_figure_mime_type() -> None: """ Check that a matplotlib figure has the correct mime-type """ diff --git a/tests/unit/test_metadata.py b/tests/unit/test_metadata.py new file mode 100644 index 00000000..4d01e14f --- /dev/null +++ b/tests/unit/test_metadata.py @@ -0,0 +1,47 @@ +import pytest +import pathlib +import re +import simvue.metadata as sv_meta + + +@pytest.mark.metadata +@pytest.mark.local +def test_cargo_env() -> None: + metadata = sv_meta._rust_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) + assert metadata["rust.environment.serde"] == "1.0.123" + assert metadata["rust.project.name"] == "example_project" + +@pytest.mark.metadata 
+@pytest.mark.local +@pytest.mark.parametrize( + "backend", ("poetry", "uv", None) +) +def test_python_env(backend: str | None) -> None: + if backend == "poetry": + metadata = sv_meta._python_env(pathlib.Path(__file__).parents[1].joinpath("example_data", "python_poetry")) + assert metadata["python.project.name"] == "example-repo" + elif backend == "uv": + metadata = sv_meta._python_env(pathlib.Path(__file__).parents[1].joinpath("example_data", "python_uv")) + assert metadata["python.project.name"] == "example-repo" + else: + metadata = sv_meta._python_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) + + assert re.findall(r"\d+\.\d+\.\d+", metadata["python.environment.numpy"]) + + +@pytest.mark.metadata +@pytest.mark.local +def test_julia_env() -> None: + metadata = sv_meta._julia_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) + assert metadata["julia.project.name"] == "Julia Demo Project" + assert re.findall(r"\d+\.\d+\.\d+", metadata["julia.environment.AbstractDifferentiation"]) + + +@pytest.mark.metadata +@pytest.mark.local +def test_js_env() -> None: + metadata = sv_meta._node_js_env(pathlib.Path(__file__).parents[1].joinpath("example_data")) + assert metadata["javascript.project.name"] == "my-awesome-project" + assert re.findall(r"\d+\.\d+\.\d+", metadata["javascript.environment.node_modules/dotenv"]) + + diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py new file mode 100644 index 00000000..4079acb7 --- /dev/null +++ b/tests/unit/test_metric_range_alert.py @@ -0,0 +1,192 @@ +import contextlib +import time +import pytest +import json +import uuid + +from simvue.api.objects import MetricsRangeAlert, Alert +from simvue.client import Client +from simvue.sender import sender + +@pytest.mark.api +@pytest.mark.online +def test_metric_range_alert_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_range_alert_{_uuid}", + 
description = "Test metric range alerts", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range", + ) + _alert.commit() + assert _alert.source == "metrics" + assert _alert.alert.frequency == 1 + assert _alert.name == f"metrics_range_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_metric_range_alert_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range", + offline=True + ) + _alert.commit() + assert _alert.source == "metrics" + assert _alert.alert.frequency == 1 + assert _alert.name == f"metrics_range_alert_{_uuid}" + assert _alert.notification == "none" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("source") == "metrics" + assert _local_data.get("alert").get("frequency") == 1 + assert _local_data.get("name") == f"metrics_range_alert_{_uuid}" + assert _local_data.get("notification") == "none" + assert _local_data.get("alert").get("range_low") == 10 + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == f"metrics_range_alert_{_uuid}" + assert _online_alert.alert.range_low == 10 + + _alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", 
f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() + + +@pytest.mark.api +@pytest.mark.online +def test_metric_range_alert_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range", + ) + _alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, MetricsRangeAlert) + _new_alert.read_only(False) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" + _new_alert.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_metric_range_alert_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range", + offline=True + ) + _alert.commit() + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == f"metrics_range_alert_{_uuid}" + assert _online_alert.alert.range_low == 10 + + _new_alert = MetricsRangeAlert(_alert.id) + _new_alert.read_only(False) + _new_alert.description = "updated!" 
+ _new_alert.commit() + + # Since changes havent been sent, check online run not updated + _online_alert.refresh() + assert _online_alert.description != "updated!" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("description") == "updated!" + + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + _online_alert.refresh() + assert _online_alert.description == "updated!" + + _online_alert.read_only(False) + _online_alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() + +@pytest.mark.api +@pytest.mark.online +def test_metric_range_alert_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsRangeAlert.new( + name=f"metrics_range_alert_{_uuid}", + description = "Test metric range alerts", + frequency=1, + notification="none", + metric="x", + range_low=10, + range_high=15, + window=1, + aggregation="average", + rule="is inside range" + ) + _alert.commit() + + _failed = [] + + for member in _alert._properties: + try: + getattr(_alert, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _alert.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) + diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py new file mode 100644 index 00000000..6d737945 --- /dev/null +++ b/tests/unit/test_metric_threshold_alert.py @@ -0,0 +1,190 @@ +import time +import contextlib +import pytest +import json +import uuid + +from simvue.api.objects import MetricsThresholdAlert, Alert +from simvue.sender import sender + +@pytest.mark.api +@pytest.mark.online +def test_metric_threshold_alert_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsThresholdAlert.new( + name=f"metrics_threshold_alert_{_uuid}", 
+ frequency=1, + notification="none", + metric="x", + threshold=10, + window=1, + rule="is above", + aggregation="average", + description="a metric threshold alert" + ) + _alert.commit() + assert _alert.source == "metrics" + assert _alert.alert.frequency == 1 + assert _alert.name == f"metrics_threshold_alert_{_uuid}" + assert _alert.notification == "none" + _alert.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_metric_threshold_alert_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = MetricsThresholdAlert.new( + name=f"metrics_threshold_alert_{_uuid}", + frequency=1, + notification="none", + threshold=10, + window=1, + metric="x", + rule="is above", + aggregation="average", + offline=True, + description="a metric threshold alert" + ) + _alert.commit() + assert _alert.source == "metrics" + assert _alert.alert.frequency == 1 + assert _alert.name == f"metrics_threshold_alert_{_uuid}" + assert _alert.notification == "none" + + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("source") == "metrics" + assert _local_data.get("alert").get("frequency") == 1 + assert _local_data.get("name") == f"metrics_threshold_alert_{_uuid}" + assert _local_data.get("notification") == "none" + assert _local_data.get("alert").get("threshold") == 10 + + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + + assert _online_alert.source == "metrics" + assert _online_alert.alert.frequency == 1 + assert _online_alert.name == f"metrics_threshold_alert_{_uuid}" + assert _online_alert.alert.threshold == 10 + + _online_alert.read_only(False) + _online_alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", 
@pytest.mark.api
@pytest.mark.online
def test_metric_threshold_alert_modification_online() -> None:
    """Modify a metric threshold alert through a second online handle.

    Checks that attribute changes are staged locally and only become
    visible on the object once ``commit`` is called.
    """
    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
    _alert = MetricsThresholdAlert.new(
        name=f"metrics_threshold_alert_{_uuid}",
        description="a metric threshold alert",
        frequency=1,
        notification="none",
        threshold=10,
        window=1,
        metric="x",
        rule="is above",
        aggregation="average",
    )
    _alert.commit()
    time.sleep(1)

    # The generic Alert factory should hand back the specialised subclass
    _retrieved = Alert(_alert.id)
    assert isinstance(_retrieved, MetricsThresholdAlert)

    _retrieved.read_only(False)
    _retrieved.description = "updated!"
    # Staged change: not yet visible until committed
    assert _retrieved.description != "updated!"
    _retrieved.commit()
    assert _retrieved.description == "updated!"
    _retrieved.delete()


@pytest.mark.api
@pytest.mark.offline
def test_metric_threshold_alert_modification_offline() -> None:
    """Create a metric threshold alert offline, send it, then modify it.

    Verifies offline modifications reach the server only after the sender
    is run a second time.
    """
    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
    _alert = MetricsThresholdAlert.new(
        name=f"metrics_threshold_alert_{_uuid}",
        frequency=1,
        notification="none",
        threshold=10,
        window=1,
        metric="x",
        rule="is above",
        aggregation="average",
        offline=True,
        description="a metric threshold alert",
    )
    _alert.commit()

    _staging_dir = _alert._local_staging_file.parents[1]
    _server_id_file = _staging_dir.joinpath(
        "server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt"
    )

    sender(_staging_dir, 1, 10, ["alerts"])
    time.sleep(1)

    # Get online ID and retrieve the server-side alert
    _online_alert = MetricsThresholdAlert(_server_id_file.read_text())

    assert _online_alert.source == "metrics"
    assert _online_alert.alert.frequency == 1
    assert _online_alert.name == f"metrics_threshold_alert_{_uuid}"
    assert _online_alert.alert.threshold == 10

    _local_alert = MetricsThresholdAlert(_alert.id)
    _local_alert.read_only(False)
    assert isinstance(_local_alert, MetricsThresholdAlert)
    _local_alert.description = "updated!"
    _local_alert.commit()

    # Changes have not been sent yet, so the online alert is unchanged
    _online_alert.refresh()
    assert _online_alert.description != "updated!"

    with _alert._local_staging_file.open() as in_f:
        _staged = json.load(in_f)
    assert _staged.get("description") == "updated!"

    sender(_staging_dir, 1, 10, ["alerts"])
    time.sleep(1)

    _online_alert.refresh()
    assert _online_alert.description == "updated!"

    _online_alert.read_only(False)
    _online_alert.delete()
    _server_id_file.unlink()


@pytest.mark.api
@pytest.mark.online
def test_metric_range_alert_properties() -> None:
    """Check every declared property of the alert is retrievable.

    NOTE(review): despite the test name this constructs a
    ``MetricsThresholdAlert``, not a range alert — presumably a copy-paste
    slip; confirm whether a ``MetricsRangeAlert`` variant was intended.
    """
    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
    _alert = MetricsThresholdAlert.new(
        name=f"metrics_threshold_alert_{_uuid}",
        description="a metric threshold alert",
        frequency=1,
        notification="none",
        metric="x",
        threshold=10,
        window=1,
        rule="is above",
        aggregation="average",
    )
    _alert.commit()

    _failures = []
    for _property in _alert._properties:
        try:
            getattr(_alert, _property)
        except Exception as e:
            _failures.append((_property, f"{e}"))
    # Best-effort cleanup even when property access failed
    with contextlib.suppress(Exception):
        _alert.delete()

    if _failures:
        raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failures))


@pytest.mark.api
@pytest.mark.online
def test_metrics_creation_online() -> None:
    """Log one metrics step against a fresh run and read it back."""
    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
    _folder_name = f"/simvue_unit_testing/{_uuid}"
    _folder = Folder.new(path=_folder_name)
    _run = Run.new(folder=_folder_name)
    _values = {"x": 1, "y": 2.0, "z": True}
    _time: int = 1
    _step: int = 1
    _folder.commit()
    _run.commit()
    _metrics = Metrics.new(
        run=_run.id,
        metrics=[
            {
                # Timestamp recorded in UTC with microsecond precision
                "timestamp": datetime.datetime.now(datetime.timezone.utc).strftime(
                    "%Y-%m-%dT%H:%M:%S.%f"
                ),
                "time": _time,
                "step": _step,
                "values": _values,
            }
        ],
    )
    assert _metrics.to_dict()
    _metrics.commit()
    assert _metrics.get(metrics=["x", "y", "z"], xaxis="step")
    assert _metrics.span(run_ids=[_run.id])
    assert _metrics.names(run_ids=[_run.id])
    _run.delete()
    _folder.delete(recursive=True, delete_runs=True, runs_only=False)
simvue.sender import sender +from simvue.serialization import _deserialize_numpy_array + +@pytest.mark.api +@pytest.mark.online +def test_object_artifact_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(name=f"test_object_artifact_run_{_uuid}", folder=_folder_name) + _folder.commit() + _run.commit() + + _array = numpy.array(range(10)) + _artifact = ObjectArtifact.new( + name=f"test_object_artifact_{_uuid}", + obj=_array, + storage=None, + metadata=None + ) + _artifact.attach_to_run(_run.id, "input") + time.sleep(1) + + _downloaded = _deserialize_numpy_array(next(_artifact.download_content())) + assert numpy.array_equal(_downloaded, _array) + + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + + +@pytest.mark.api +@pytest.mark.offline +def test_object_artifact_creation_offline(offline_test: pathlib.Path) -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(name=f"test_object_artifact_offline_run_{_uuid}", folder=_folder_name, offline=True) + _folder.commit() + _run.commit() + + _array = numpy.array(range(10)) + _artifact = ObjectArtifact.new( + name=f"test_object_artifact_offline_{_uuid}", + obj=_array, + storage=None, + metadata=None, + offline=True + ) + _artifact.attach_to_run(_run.id, "input") + + with _artifact._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("name") == f"test_object_artifact_offline_{_uuid}" + assert _local_data.get("mime_type") == "application/vnd.simvue.numpy.v1" + assert _local_data.get("runs") == {_run.id: "input"} + + _id_mapping = sender(offline_test.joinpath(".simvue"), 1, 10) + time.sleep(1) + + _online_artifact = Artifact(_id_mapping.get(_artifact.id)) + + assert _online_artifact.name == 
f"test_object_artifact_offline_{_uuid}" + assert _online_artifact.mime_type == "application/vnd.simvue.numpy.v1" + + _downloaded = _deserialize_numpy_array(next(_online_artifact.download_content())) + assert numpy.array_equal(_downloaded, _array) + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + diff --git a/tests/unit/test_pandas_dataframe_mimetype.py b/tests/unit/test_pandas_dataframe_mimetype.py index 57d5f775..1d3de890 100644 --- a/tests/unit/test_pandas_dataframe_mimetype.py +++ b/tests/unit/test_pandas_dataframe_mimetype.py @@ -7,8 +7,9 @@ except ImportError: pd = None +@pytest.mark.local @pytest.mark.skipif(not pd, reason="Pandas is not installed") -def test_pandas_dataframe_mimetype(): +def test_pandas_dataframe_mimetype() -> None: """ Check that the mime-type of a Pandas dataframe is correct """ diff --git a/tests/unit/test_pandas_dataframe_serialization.py b/tests/unit/test_pandas_dataframe_serialization.py index fde9262d..79c524e1 100644 --- a/tests/unit/test_pandas_dataframe_serialization.py +++ b/tests/unit/test_pandas_dataframe_serialization.py @@ -7,8 +7,9 @@ except ImportError: pd = None +@pytest.mark.local @pytest.mark.skipif(not pd, reason="Pandas is not installed") -def test_pandas_dataframe_serialization(): +def test_pandas_dataframe_serialization() -> None: """ Check that a Pandas dataframe can be serialized then deserialized successfully """ diff --git a/tests/unit/test_pickle_serialization.py b/tests/unit/test_pickle_serialization.py index 60833665..12e89ae8 100644 --- a/tests/unit/test_pickle_serialization.py +++ b/tests/unit/test_pickle_serialization.py @@ -1,6 +1,7 @@ from simvue.serialization import deserialize_data, serialize_object - -def test_pickle_serialization(): +import pytest +@pytest.mark.local +def test_pickle_serialization() -> None: """ Check that a dictionary can be serialized then deserialized successfully """ diff --git a/tests/unit/test_plotly_figure_mime_type.py 
b/tests/unit/test_plotly_figure_mime_type.py index 8cf8a479..6884a440 100644 --- a/tests/unit/test_plotly_figure_mime_type.py +++ b/tests/unit/test_plotly_figure_mime_type.py @@ -13,10 +13,10 @@ except ImportError: plotly = None - +@pytest.mark.local @pytest.mark.skipif(not plt, reason="Matplotlib is not installed") @pytest.mark.skipif(not plotly, reason="Plotly is not installed") -def test_plotly_figure_mime_type(): +def test_plotly_figure_mime_type() -> None: """ Check that a plotly figure has the correct mime-type """ diff --git a/tests/unit/test_pytorch_tensor_mime_type.py b/tests/unit/test_pytorch_tensor_mime_type.py index 011dd1b4..35391850 100644 --- a/tests/unit/test_pytorch_tensor_mime_type.py +++ b/tests/unit/test_pytorch_tensor_mime_type.py @@ -6,9 +6,10 @@ import torch except ImportError: torch = None - + +@pytest.mark.local @pytest.mark.skipif(not torch, reason="Torch is not installed") -def test_pytorch_tensor_mime_type(): +def test_pytorch_tensor_mime_type() -> None: """ Check that a PyTorch tensor has the correct mime-type """ diff --git a/tests/unit/test_pytorch_tensor_serialization.py b/tests/unit/test_pytorch_tensor_serialization.py index 07b46048..18a36e1b 100644 --- a/tests/unit/test_pytorch_tensor_serialization.py +++ b/tests/unit/test_pytorch_tensor_serialization.py @@ -6,8 +6,9 @@ except ImportError: torch = None +@pytest.mark.local @pytest.mark.skipif(not torch, reason="Torch is not installed") -def test_pytorch_tensor_serialization(): +def test_pytorch_tensor_serialization() -> None: """ Check that a PyTorch tensor can be serialized then deserialized successfully """ diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py new file mode 100644 index 00000000..a35b8ef6 --- /dev/null +++ b/tests/unit/test_run.py @@ -0,0 +1,178 @@ +import contextlib +import json +import pytest +import time +import datetime +import uuid +from simvue.sender import sender +from simvue.api.objects import Run, Folder +from simvue.client import Client + 
+@pytest.mark.api +@pytest.mark.online +def test_run_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() + assert _run.folder == _folder_name + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + + +@pytest.mark.api +@pytest.mark.offline +def test_run_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _run_name = f"simvue_offline_run_{_uuid}" + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(name=_run_name,folder=_folder_name, offline=True) + _folder.commit() + _run.commit() + assert _run.name == _run_name + assert _run.folder == _folder_name + + with _run._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("name") == f"simvue_offline_run_{_uuid}" + assert _local_data.get("folder") == _folder_name + + sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"]) + time.sleep(1) + + # Get online ID and retrieve run + _online_id = _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_run = Run(_online_id) + + assert _online_run.name == _run_name + assert _online_run.folder == _folder_name + + _run.delete() + _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").unlink() + client = Client() + client.delete_folder(_folder_name, recursive=True, remove_runs=True) + +@pytest.mark.api +@pytest.mark.online +def test_run_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _folder.commit() + _run.commit() + assert 
_run.folder == _folder_name + time.sleep(1) + _now = datetime.datetime.now() + _new_run = Run(identifier=_run.id) + assert _new_run.status == "created" + _new_run.read_only(False) + _new_run.status = "running" + _new_run.name = "simvue_test_run" + _new_run.description = "Simvue test run" + _new_run.tags = ["simvue", "test", "tag"] + _new_run.ttl = 120 + assert _new_run.ttl != 120 + _new_run.commit() + time.sleep(1) + assert _run.ttl == 120 + assert _run.description == "Simvue test run" + assert sorted(_run.tags) == sorted(["simvue", "test", "tag"]) + assert _run.name == "simvue_test_run" + assert _run.status == "running" + _run.abort("test_run_abort") + assert _new_run.abort_trigger + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) + + +@pytest.mark.api +@pytest.mark.offline +def test_run_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _run_name = f"simvue_offline_run_{_uuid}" + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(name=_run_name, folder=_folder_name, offline=True) + _folder.commit() + _run.commit() + assert _run.name == _run_name + assert _run.folder == _folder_name + time.sleep(1) + _new_run = Run(identifier=_run.id, offline=True) + # Property has not been committed to offline + # object so not yet available + with pytest.raises(AttributeError): + _new_run.ttl + _new_run.read_only(False) + _new_run.name = "simvue_test_run" + _new_run.description = "Simvue test run" + _new_run.ttl = 120 + + _new_run.commit() + + assert _new_run.ttl == 120 + assert _new_run.description == "Simvue test run" + assert _new_run.name == "simvue_test_run" + + sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"]) + time.sleep(1) + + # Get online ID and retrieve run + _online_id = _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_run = 
Run(_online_id) + + assert _online_run.ttl == 120 + assert _online_run.description == "Simvue test run" + assert _online_run.name == "simvue_test_run" + assert _online_run.folder == _folder_name + + # Now add a new set of tags in offline mode and send + _new_run.tags = ["simvue", "test", "tag"] + _new_run.commit() + + # Shouldn't yet be available in the online run since it hasnt been sent + _online_run.refresh() + assert _online_run.tags == [] + + sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"]) + time.sleep(1) + + _online_run.refresh() + assert sorted(_new_run.tags) == sorted(["simvue", "test", "tag"]) + assert sorted(_online_run.tags) == sorted(["simvue", "test", "tag"]) + + _run.delete() + _run._local_staging_file.parents[1].joinpath("server_ids", f"{_run._local_staging_file.name.split('.')[0]}.txt").unlink() + client = Client() + client.delete_folder(_folder_name, recursive=True, remove_runs=True) + + +@pytest.mark.api +@pytest.mark.online +def test_run_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name) + _run = Run.new(folder=_folder_name) + _run.status = "running" + _run.ttl = 60 + _folder.commit() + _run.commit() + _failed = [] + assert _run.to_dict() + + for member in _run._properties: + try: + getattr(_run, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _run.delete() + _folder.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_run_init_folder.py b/tests/unit/test_run_init_folder.py index 286f6251..0bcd5b0c 100644 --- a/tests/unit/test_run_init_folder.py +++ b/tests/unit/test_run_init_folder.py @@ -1,8 +1,8 @@ from simvue import Run import pytest - -def test_run_init_folder(): +@pytest.mark.local +def test_run_init_folder() -> None: """ Check that run.init throws an exception if folder 
@pytest.mark.api
@pytest.mark.online
def test_create_s3_online() -> None:
    """Create an S3 storage entry online and verify its stored configuration.

    The endpoint is deliberately fake and validation is disabled
    (``disable_check=True``), so no real bucket is ever contacted.
    """
    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
    _failed = []
    _storage = S3Storage.new(
        name=_uuid,
        endpoint_url="https://not-a-real-url.io",
        disable_check=True,
        is_tenant_useable=False,
        is_default=False,
        region_name="fictionsville",
        access_key_id="dummy_key",
        secret_access_key="not_a_key",
        bucket="dummy_bucket",
        is_enabled=False,
    )
    _storage.commit()
    # Every declared property should be retrievable without raising
    for member in _storage._properties:
        try:
            getattr(_storage, member)
        except Exception as e:
            _failed.append((member, f"{e}"))
    assert _storage.to_dict()
    assert _storage.name == _uuid
    # Server normalises the endpoint URL with a trailing slash
    assert _storage.config.endpoint_url == "https://not-a-real-url.io/"
    assert _storage.config.region_name == "fictionsville"
    assert _storage.config.bucket == "dummy_bucket"
    assert _storage.created
    assert dict(Storage.get())
    _storage.delete()
    if _failed:
        # Aligned with the failure-report format used by the other property tests
        raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed))


@pytest.mark.api
@pytest.mark.offline
def test_create_s3_offline() -> None:
    """Create an S3 storage entry offline, send it, and verify the online copy."""
    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
    _storage = S3Storage.new(
        name=_uuid,
        endpoint_url="https://not-a-real-url.io",
        disable_check=True,
        region_name="fictionsville",
        access_key_id="dummy_key",
        secret_access_key="not_a_key",
        bucket="dummy_bucket",
        is_default=False,
        is_tenant_useable=False,
        is_enabled=False,
        offline=True,
    )
    _storage.commit()
    with _storage._local_staging_file.open() as in_f:
        _local_data = json.load(in_f)
    assert _local_data.get("name") == _uuid
    assert _local_data.get("config").get("endpoint_url") == "https://not-a-real-url.io/"
    assert _local_data.get("config").get("region_name") == "fictionsville"
    assert _local_data.get("config").get("bucket") == "dummy_bucket"
    # Use identity comparison for booleans rather than '== False'
    assert _local_data.get("is_enabled") is False
    # Server-populated fields must not exist in the staged data
    assert not _local_data.get("status", None)
    assert not _local_data.get("user", None)
    assert not _local_data.get("usage", None)

    _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"])
    _online_id = _id_mapping[_storage.id]
    time.sleep(1)

    _online_storage = S3Storage(_online_id)

    assert _online_storage.name == _uuid
    assert _online_storage.is_enabled is False
    assert _online_storage.config.endpoint_url == "https://not-a-real-url.io/"
    assert _online_storage.config.region_name == "fictionsville"
    assert _online_storage.config.bucket == "dummy_bucket"

    _online_storage.read_only(False)
    _online_storage.delete()


@pytest.mark.api
@pytest.mark.online
def test_stats() -> None:
    """Check server statistics retrieval and that Stats cannot be created."""
    _statistics = Stats()
    # Stats has no identifier, so its URL is just the endpoint base
    assert f"{_statistics.url}" == f"{_statistics._base_url}"
    assert isinstance(_statistics.runs.created, int)
    assert isinstance(_statistics.runs.running, int)
    assert isinstance(_statistics.runs.completed, int)
    assert isinstance(_statistics.runs.data, int)
    assert _statistics.to_dict()
    assert _statistics.whoami()

    # Stats is read-only server state: creation must be rejected
    with pytest.raises(AttributeError):
        Stats.new()
messages are added to log """ diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py new file mode 100644 index 00000000..1ddac7b1 --- /dev/null +++ b/tests/unit/test_tag.py @@ -0,0 +1,134 @@ +import time +import contextlib +import pytest +import uuid +import json +import pydantic.color +from simvue.api.objects.tag import Tag +from simvue.sender import sender + +@pytest.mark.api +@pytest.mark.online +def test_tag_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}") + _tag.commit() + assert _tag.name == f"test_tag_{_uuid}" + assert _tag.colour + assert not _tag.description + _tag.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_tag_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}", offline=True) + _tag.commit() + assert _tag.name == f"test_tag_{_uuid}" + + with pytest.raises(AttributeError): + _tag.colour + + with _tag._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("name") == f"test_tag_{_uuid}" + + _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"]) + time.sleep(1) + + _online_id = _id_mapping.get(_tag.id) + + _online_tag = Tag(_online_id) + assert _online_tag.name == f"test_tag_{_uuid}" + _online_tag.read_only(False) + _online_tag.delete() + + + + +@pytest.mark.api +@pytest.mark.online +def test_tag_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}") + _tag.commit() + time.sleep(1) + _new_tag = Tag(_tag.id) + _new_tag.read_only(False) + _new_tag.name = _tag.name.replace("test", "test_modified") + _new_tag.colour = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) + _new_tag.description = "modified test tag" + _new_tag.commit() + assert _new_tag.name == f"test_modified_tag_{_uuid}" + assert _new_tag.colour.r == 250 / 255 + assert _new_tag.description == "modified test tag" + + 
+@pytest.mark.api +@pytest.mark.offline +def test_tag_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}", offline=True) + _tag.commit() + + with _tag._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("name") == f"test_tag_{_uuid}" + + _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"]) + _online_id = _id_mapping.get(_tag.id) + _online_tag = Tag(_online_id) + + assert _online_tag.name == f"test_tag_{_uuid}" + + _new_tag = Tag(_tag.id) + _new_tag.read_only(False) + _new_tag.name = _tag.name.replace("test", "test_modified") + _new_tag.colour = "rgb({r}, {g}, {b})".format(r=250, g=0, b=0) + _new_tag.description = "modified test tag" + _new_tag.commit() + + # Check since not yet sent, online not changed + _online_tag.refresh() + assert _online_tag.name == f"test_tag_{_uuid}" + + with _tag._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("name") == f"test_modified_tag_{_uuid}" + assert pydantic.color.parse_str(_local_data.get("colour")).r == 250 / 255 + assert _local_data.get("description") == "modified test tag" + + sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"]) + time.sleep(1) + + # Check online version is updated + _online_tag.refresh() + assert _online_tag.name == f"test_modified_tag_{_uuid}" + assert _online_tag.colour.r == 250 / 255 + assert _online_tag.description == "modified test tag" + + _online_tag.read_only(False) + _online_tag.delete() + + +@pytest.mark.api +@pytest.mark.online +def test_tag_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tag = Tag.new(name=f"test_tag_{_uuid}") + _tag.commit() + _failed = [] + + for member in _tag._properties: + try: + getattr(_tag, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _tag.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- 
".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py new file mode 100644 index 00000000..82cf001d --- /dev/null +++ b/tests/unit/test_tenant.py @@ -0,0 +1,74 @@ +import pytest +import time +import contextlib +import json +import uuid + +from simvue.api.objects.administrator import Tenant +from simvue.sender import sender + +@pytest.mark.api +@pytest.mark.online +def test_create_tenant_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid) + try: + _tenant.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + time.sleep(1) + _new_tenant = Tenant(_tenant.id) + assert _new_tenant.name == _uuid + assert _new_tenant.is_enabled + _new_tenant.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_create_tenant_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid, offline=True) + _tenant.commit() + time.sleep(1) + _new_tenant = Tenant(_tenant.id) + assert _new_tenant.name == _uuid + assert _new_tenant.is_enabled + + with _new_tenant._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("name") == _uuid + assert _local_data.get("is_enabled") == True + + _id_mapping = sender(_new_tenant._local_staging_file.parents[1], 1, 10, ["tenants"]) + time.sleep(1) + _online_user = Tenant(_id_mapping.get(_new_tenant.id)) + assert _online_user.name == _uuid + assert _online_user.is_enabled == True + + _online_user.read_only(False) + _online_user.delete() + + +@pytest.mark.api +@pytest.mark.online +def test_tenant_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid) + try: + _tenant.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + _failed = [] + + for member in _tenant._properties: + try: + getattr(_tenant, member) + except Exception as e: + 
_failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _tenant.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_user.py b/tests/unit/test_user.py new file mode 100644 index 00000000..97e503c3 --- /dev/null +++ b/tests/unit/test_user.py @@ -0,0 +1,108 @@ +import pytest +import time +import contextlib +import json +import uuid + +from simvue.api.objects.administrator import User, Tenant +from simvue.sender import sender + +@pytest.mark.api +@pytest.mark.online +def test_create_user_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid) + try: + _tenant.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + _user = User.new( + username="jbloggs", + fullname="Joe Bloggs", + email="jbloggs@simvue.io", + is_manager=False, + is_admin=False, + is_readonly=True, + welcome=False, + tenant=_tenant.id + ) + _user.commit() + time.sleep(1) + _new_user = User(_user.id) + assert _new_user.username == "jbloggs" + assert _new_user.enabled + _new_user.delete() + _tenant.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_create_user_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _user = User.new( + username="jbloggs", + fullname="Joe Bloggs", + email="jbloggs@simvue.io", + is_manager=False, + is_admin=False, + is_readonly=True, + welcome=False, + tenant=_uuid, + offline=True + ) + _user.commit() + assert _user.username == "jbloggs" + assert _user.fullname == "Joe Bloggs" + assert _user.email == "jbloggs@simvue.io" + + with _user._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("username") == "jbloggs" + assert _local_data.get("fullname") == "Joe Bloggs" + assert _local_data.get("email") == "jbloggs@simvue.io" + + _id_mapping = sender(_user._local_staging_file.parents[1], 1, 10, ["users"]) + time.sleep(1) + _online_user = 
User(_id_mapping.get(_user.id)) + assert _online_user.username == "jbloggs" + assert _online_user.fullname == "Joe Bloggs" + assert _online_user.email == "jbloggs@simvue.io" + + _online_user.read_only(False) + _online_user.delete() + +@pytest.mark.api +@pytest.mark.online +def test_user_get_properties() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _tenant = Tenant.new(name=_uuid) + try: + _tenant.commit() + except RuntimeError as e: + assert "You do not have permission" in str(e) + return + _user = User.new( + username="jbloggs", + fullname="Joe Bloggs", + email="jbloggs@simvue.io", + is_manager=False, + is_admin=False, + is_readonly=True, + welcome=False, + tenant=_tenant.id + ) + _user.commit() + _failed = [] + + for member in _user._properties: + try: + getattr(_user, member) + except Exception as e: + _failed.append((member, f"{e}")) + with contextlib.suppress(Exception): + _user.delete() + _tenant.delete() + + if _failed: + raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py new file mode 100644 index 00000000..71d08336 --- /dev/null +++ b/tests/unit/test_user_alert.py @@ -0,0 +1,204 @@ +import time +import json +import contextlib +import pytest +import uuid +from simvue.sender import sender +from simvue.api.objects import Alert, UserAlert, Run +from simvue.api.objects.folder import Folder + +@pytest.mark.api +@pytest.mark.online +def test_user_alert_creation_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none", + description=None + ) + _alert.commit() + assert _alert.source == "user" + assert _alert.name == f"users_alert_{_uuid}" + assert _alert.notification == "none" + assert dict(Alert.get()) + _alert.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_user_alert_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = 
UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none", + offline=True, + description = "test user alert" + ) + _alert.commit() + assert _alert.source == "user" + assert _alert.name == f"users_alert_{_uuid}" + assert _alert.notification == "none" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("source") == "user" + assert _local_data.get("name") == f"users_alert_{_uuid}" + assert _local_data.get("notification") == "none" + + _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = Alert(_online_id) + + assert _online_alert.source == "user" + assert _online_alert.name == f"users_alert_{_uuid}" + assert _online_alert.notification == "none" + + _online_alert.read_only(False) + _online_alert.delete() + _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink() + + +@pytest.mark.api +@pytest.mark.online +def test_user_alert_modification_online() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none", + description=None + ) + _alert.commit() + time.sleep(1) + _new_alert = Alert(_alert.id) + assert isinstance(_new_alert, UserAlert) + _new_alert.read_only(False) + _new_alert.description = "updated!" + assert _new_alert.description != "updated!" + _new_alert.commit() + assert _new_alert.description == "updated!" 
+ _new_alert.delete() + + +@pytest.mark.api +@pytest.mark.offline +def test_user_alert_modification_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _alert = UserAlert.new( + name=f"users_alert_{_uuid}", + notification="none", + offline=True, + description = "test user alert" + ) + _alert.commit() + + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Get online ID and retrieve alert + _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text() + _online_alert = UserAlert(_online_id) + + assert _online_alert.source == "user" + assert _online_alert.name == f"users_alert_{_uuid}" + assert _online_alert.notification == "none" + + _new_alert = UserAlert(_alert.id) + _new_alert.read_only(False) + _new_alert.description = "updated!" + _new_alert.commit() + + # Since changes havent been sent, check online run not updated + _online_alert.refresh() + assert _online_alert.description != "updated!" + + with _alert._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + assert _local_data.get("description") == "updated!" + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + _online_alert.refresh() + assert _online_alert.description == "updated!" 
+
+    _online_alert.read_only(False)
+    _online_alert.delete()
+    _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").unlink()
+
+
+@pytest.mark.api
+@pytest.mark.online
+def test_user_alert_properties() -> None:
+    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
+    _alert = UserAlert.new(
+        name=f"users_alert_{_uuid}",
+        notification="none",
+        description=None
+    )
+    _alert.commit()
+
+    _failed = []
+
+    for member in _alert._properties:
+        try:
+            getattr(_alert, member)
+        except Exception as e:
+            _failed.append((member, f"{e}"))
+    with contextlib.suppress(Exception):
+        _alert.delete()
+
+    if _failed:
+        raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed))
+
+
+@pytest.mark.api
+@pytest.mark.online
+def test_user_alert_status() -> None:
+    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
+    _alert = UserAlert.new(
+        name=f"users_alert_{_uuid}",
+        notification="none",
+        description=None
+    )
+    _alert.commit()
+    _folder = Folder.new(path=f"/simvue_unit_tests/{_uuid}")
+    _run = Run.new(folder=f"/simvue_unit_tests/{_uuid}")
+    _folder.commit()
+    _run.alerts = [_alert.id]
+    _run.commit()
+    _alert.set_status(_run.id, "critical")
+    time.sleep(1)
+    _run.delete()
+    _folder.delete(recursive=True, runs_only=False, delete_runs=True)
+    _alert.delete()
+
+
+@pytest.mark.api
+@pytest.mark.offline
+def test_user_alert_status_offline() -> None:
+    _uuid: str = f"{uuid.uuid4()}".split("-")[0]
+    _alert = UserAlert.new(
+        name=f"users_alert_{_uuid}",
+        notification="none",
+        description=None,
+        offline=True
+    )
+    _alert.commit()
+    _folder = Folder.new(path=f"/simvue_unit_tests/{_uuid}", offline=True)
+    _run = Run.new(folder=f"/simvue_unit_tests/{_uuid}", offline=True)
+    _folder.commit()
+    _run.alerts = [_alert.id]
+    _run.commit()
+
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"])
+    time.sleep(1)
+
+    _alert.set_status(_run.id, "critical")
+    _alert.commit()
+    time.sleep(1)
+    _run.delete()
+    _folder.delete(recursive=True, runs_only=False, delete_runs=True)
+    _alert.delete()
+