diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 070fe78..a7514e8 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -4,61 +4,89 @@ This repository implements a reusable Python module (`pypsa_validation_processing`) that: - Takes a definitions folder holding IAMC-formatted variable definitions - Executes the corresponding function (if available) to extract the value of the respective variable from a given PyPSA NetworkCollection -- Returns the results as a `pyam.IamDataFrame` +- Saves the result as IAMC-formatted xlsx-file. ## Code Structure ```mermaid classDiagram - class workflow.py{ - config_path: pathlib.Path - build_parser() + class Network_Processor { + +path_config: pathlib_Path + +config: dict + +country: str + +definition_path: pathlib_Path + +mapping_path: pathlib_Path + +output_path: pathlib_Path + +network_results_path: pathlib_Path + +model_name: str + +scenario_name: str + +network_collection: pypsa_NetworkCollection + +dsd: nomenclature_DataStructureDefinition + +functions_dict: dict + +dsd_with_values: pyam_IamDataFrame + +path_dsd_with_values: pathlib_Path + +__init__(path_config: pathlib_Path) + +_read_config() dict + +_read_mappings() dict + +_read_pypsa_network_collection() pypsa_NetworkCollection + +read_definitions() nomenclature_DataStructureDefinition + +_execute_function_for_variable(variable: str, n: pypsa_Network) pd_Series + +_postprocess_statistics_result(variable: str, result: pd_Series) pd_DataFrame + +structure_pyam_from_pandas(df: pd_DataFrame) pyam_IamDataFrame + +calculate_variables_values() None + +write_output_to_xlsx() None } - class statistics_functions.py{ - one function per variable + class statistics_functions_py { + +Final_Energy_by_Carrier__Electricity(n: pypsa_Network) pd_DataFrame + +Final_Energy_by_Sector__Transportation(n: pypsa_Network) pd_DataFrame } - class mapping.yaml{ - mapping variables to function names + class utils_py { + 
+EU27_COUNTRY_CODES: dict~str,str~ + +UNITS_MAPPING: dict~str,str~ } - class config.yaml{ - user configuration + class config_default_yaml { + +country: str + +definitions_path: str + +output_path: str + +network_results_path: str + +model_name: str + +scenario_name: str } - Network_Processor <-- workflow.py : executes - statistics_functions.py <-- Network_Processor : executes - mapping.yaml <|-- Network_Processor : includes - config.yaml <|-- Network_Processor : includes - - class Network_Processor{ - config_path: pathlib.Path - config: dict - network_results_path: pathlib.Path - definitions_path: pathlib.Path - mappings_path: pathlib.Path - country: str - model_name: str - scenario_name: str - network_collection: pypsa.NetworkCollection - dsd: nomenclature.DataStructureDefinition - functions_dict: dict - dsd_with_values: dict - path_dsd_with_values: pathlib.Path - - __init__() - __repr__() - _read_config() - _read_mappings() - _read_pypsa_network_collection() - read_definitions() - _execute_function_for_variable() - structure_pyan_from_pandas() - calculate_variable_values() - write_output_to_xlsx() + class mapping_default_yaml { + +Final_Energy_by_Carrier__Electricity: str + +Final_Energy_by_Sector__Transportation: str } - note for Network_Processor "in class_definitions.py" + + class pypsa_NetworkCollection + class pypsa_Network { + +name: str + +statistics: pypsa_StatisticsAccessor + } + class pypsa_StatisticsAccessor { + +energy_balance(components: list, carrier: str, groupby: list) pd_Series + } + + class nomenclature_DataStructureDefinition { + +variable: pd_Series + } + + class pyam_IamDataFrame + class pd_DataFrame + class pd_Series + + Network_Processor --> pypsa_NetworkCollection : owns + Network_Processor --> nomenclature_DataStructureDefinition : uses + Network_Processor --> pyam_IamDataFrame : creates + Network_Processor --> statistics_functions_py : calls + Network_Processor --> utils_py : imports + Network_Processor --> config_default_yaml : reads + 
Network_Processor --> mapping_default_yaml : reads + pypsa_NetworkCollection --> pypsa_Network : contains + pypsa_Network --> pypsa_StatisticsAccessor : has ``` ## Folder Structure @@ -75,6 +103,7 @@ classDiagram | | `- mapping.default.yaml | |- class_definitions.py | |- statistics_functions.py +| `- utils.py | `- workflow.py |- workflow.py |- resources @@ -90,6 +119,7 @@ classDiagram - `mapping.default.yaml` (or another mapping-file provided by the config-file) holds the mapping of IAMC variable to the respective function in `pypsa_validation_processing/statistics_functions.py` - The package workflow entrypoint is `pypsa_validation_processing/workflow.py`; the root `workflow.py` is a thin compatibility wrapper - Default configs are packaged inside `pypsa_validation_processing/configs/` +- Pixi is used as the environment and package manager. Prefix CLI commands with `pixi run` to execute them in the intended Pixi environment. - The `resources/` directory holds non-versioned resources - The `sister_packages/` directory holds related packages for background information - The `tests/` directory holds unit and integration tests @@ -102,7 +132,7 @@ A task is complete when: - Changes are integrated into existing folder structure. - A short summary of changes is provided. - In chat mode: the user has reviewed the changes and given approval. -- For a pull-request: the user has to be reviewer of the pull request to give approval. +- For a pull-request: the user is reviewer of the pull request to give approval. ## Forbidden Actions - Do NOT invent datasets, files, or APIs. 
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..33749d0 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,30 @@ +name: Tests + +on: + push: + branches: + - main + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Install Pixi + uses: prefix-dev/setup-pixi@v0.9.4 + with: + pixi-version: v0.66.0 + + - name: Run Tests + run: pixi run pytest tests/ -v --cov=pypsa_validation_processing --cov-report=xml + + - name: Upload Coverage Reports + uses: codecov/codecov-action@v4 + with: + files: ./coverage.xml + flags: unittests + name: codecov-umbrella + fail_ci_if_error: false diff --git a/.gitignore b/.gitignore index 6d37bce..9dd5807 100644 --- a/.gitignore +++ b/.gitignore @@ -46,8 +46,8 @@ coverage.xml cover/ # Jupyter Notebook -.ipynb_checkpoints -.ipynb +*.ipynb_checkpoints +*.ipynb # IPython profile_default/ diff --git a/README.md b/README.md index a00e079..ea5907b 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,18 @@ This package processes a PyPSA network for a given set of defined IAMC-Variables ```bash pip install . 
``` +### Run the workflow +Run the workflow with +```bash +pixi run workflow +``` +This statement runs `"python workflow.py"` +### Run tests +Run tests with +```bash +pixi run test +``` +This statement runs `"pytest tests/ -v"` ## Project structure @@ -33,9 +45,87 @@ pypsa_validation_processing/ | |-- workflow.py # package-level workflow orchestration | |-- class_definitions.py # core processing classes | |-- statistics_functions.py # pypsa statistics functions +| |-- utils.py # static information and general utility functions | `-- configs/ # package configuration files `-- config.default.yaml # default configuration file `-- mapping.default.yaml # mapping IAMC-variable - statistics-function |-- resources/ # non-versioned resources `-- tests/ # test suite -``` \ No newline at end of file +``` + +## Variable's Statistics - Functions + +This section describes the conventions for adding new variable statistics functions to `pypsa_validation_processing/statistics_functions.py`. + +### Naming Convention + +Function names follow the IAMC variable name with these substitutions: + +- Each `|` (pipe / hierarchy separator) is replaced by `__` (double underscore). +- Spaces are replaced by `_` (single underscore) +- Other special characters are fully removed. + +Examples: + +| IAMC Variable | Function Name | +|---|---| +| `Final Energy [by Carrier]\|Electricity` | `Final_Energy_by_Carrier__Electricity` | +| `Final Energy [by Sector]\|Transportation` | `Final_Energy_by_Sector__Transportation` | + +### Function Signature (fixed) + +Every function receives exactly one argument – a single `pypsa.Network` object representing one investment year – and returns a `pandas.Series`: + +```python +def (n: pypsa.Network) -> pd.Series: + ... 
+``` + +**The returned `Series` is of the structure of the direct outcome of a `pypsa.statistics` - Function.** It therefore must have a multi-level index that includes a level named `"unit"` and `"variable"`, so that the post-processing step can extract the unit information. It is possible to return multiple values with different units. Units are then converted to IAMC-valid units and summed over. Do not mix energy- and emissions- units in one statement! + +#### Example output + +- statistics-statement, grouped by country and unit: +```python +n.statistics.energy_balance( + carrier = ["land transport EV", "land transport fuel cell", "kerosene for aviation", "shipping methanol"], + components = "Load", + groupby = ["carrier", "unit", "country"], + direction = "withdrawal" +).groupby(["country", "unit"]).sum() +``` + +- returns a processable `pd.Series`: +``` +country unit +AL MWh_LHV 1.073021e+06 + MWh_el 1.996662e+06 +AT MWh_LHV 1.319779e+07 + MWh_el 2.105799e+07 +BA MWh_LHV 3.214431e+05 + ... +SI MWh_el 5.576678e+06 +SK MWh_LHV 1.185324e+06 + MWh_el 8.633450e+06 +XK MWh_LHV 8.771836e+04 + MWh_el 1.081549e+06 +Length: 68, dtype: float64 +``` + +### Mapping File + +`configs/mapping.default.yaml` maps each IAMC variable name to the corresponding function name in `statistics_functions.py`: + +```yaml +Final Energy [by Carrier]|Electricity: Final_Energy_by_Carrier__Electricity +Final Energy [by Sector]|Transportation: Final_Energy_by_Sector__Transportation +``` + +At runtime, `Network_Processor` reads this mapping, looks up the function for each defined variable, and calls it for every network in the collection. Variables without a mapping entry are silently skipped. + +### Register statistics for a new variable +To register a new variable +- add an entry to the mapping file +- implement the corresponding function +- add a corresponding test-function +- make sure that the introduced variable is also part of your variable set to be executed. 
\ No newline at end of file diff --git a/pypsa_validation_processing/class_definitions.py b/pypsa_validation_processing/class_definitions.py index b0bc82b..2c493f4 100644 --- a/pypsa_validation_processing/class_definitions.py +++ b/pypsa_validation_processing/class_definitions.py @@ -1,5 +1,5 @@ from __future__ import annotations - +import os from pathlib import Path import yaml import pandas as pd @@ -7,6 +7,8 @@ import nomenclature import pyam +from pypsa_validation_processing.utils import EU27_COUNTRY_CODES, UNITS_MAPPING + class Network_Processor: """Processes a PyPSA NetworkCollection against IAMC variable definitions. @@ -102,7 +104,9 @@ def _read_mappings(self) -> dict: def _read_pypsa_network_collection(self) -> pypsa.NetworkCollection: """Reads in pypsa networks as NetworkCollection from network_results_path / networks""" - return pypsa.NetworkCollection(self.network_results_path / "networks") + nw_path = self.network_results_path / "networks" + file_list = [nw_path / f for f in os.listdir(nw_path) if f.endswith(".nc")] + return pypsa.NetworkCollection(file_list) def read_definitions(self) -> nomenclature.DataStructureDefinition: """Read IAMC variable definitions from the definitions folder. @@ -120,8 +124,8 @@ def read_definitions(self) -> nomenclature.DataStructureDefinition: return dsd def _execute_function_for_variable( - self, variable: str - ) -> pyam.IamDataFrame | None: + self, variable: str, n: pypsa.Network + ) -> pd.Series | None: """Look up and execute the statistics function for a single variable. Looks up *variable* in ``self.functions_dict``, imports the @@ -135,7 +139,7 @@ def _execute_function_for_variable( Returns ------- - pyam.IamDataFrame | None + pd.Series | None Computed values for the variable, or ``None`` if no function is registered for it. 
""" @@ -154,7 +158,22 @@ def _execute_function_for_variable( f"WARNING: Variable {variable}: No function '{func_name}' not found in statistics_functions.py" ) return None - return func(self.network_collection) + return func(n) + + def _postprocess_statistics_result( + self, variable: str, result: pd.Series + ) -> pd.DataFrame: + """Formatting and creating a pandas dataframe from results Series and variable_name""" + result = result.xs(self.country, level="country") + df = pd.DataFrame( + { + "variable": [variable] * len(list(result.values)), + "unit": list(result.index.get_level_values("unit").map(UNITS_MAPPING)), + "value": list(result.values), + } + ) + df = df.groupby(["variable", "unit"]).sum() + return df def structure_pyam_from_pandas(self, df: pd.DataFrame) -> pyam.IamDataFrame: """Creates a pyam.IamDataFrame from a pandas DataFrame. @@ -169,6 +188,8 @@ def structure_pyam_from_pandas(self, df: pd.DataFrame) -> pyam.IamDataFrame: pyam.IamDataFrame A pyam.IamDataFrame with IAMC variables as columns and years as index. """ + # add 'variable' and 'unit' columns + df = df.reset_index() # rename columns if needed col_renaming_dict = { "variable": "variable_name", @@ -184,7 +205,7 @@ def structure_pyam_from_pandas(self, df: pd.DataFrame) -> pyam.IamDataFrame: data=df.drop_duplicates(), model=self.model_name, scenario=self.scenario_name, - region=self.country, + region=EU27_COUNTRY_CODES.get(self.country, self.country), variable="variable_name", unit="unit_pypsa", ) @@ -205,17 +226,29 @@ def calculate_variables_values(self) -> None: pyam.IamDataFrame Combined results for all variables that have a registered function. 
""" - - results = [] - for variable in self.dsd.variable.to_pandas()["variable"]: - result = self._execute_function_for_variable(variable) - if result is not None: - results.append(result) - - if results: - ds_with_values = pyam.concat(results) - else: - ds_with_values = None + container_investment_years = [] + for i in range(0, self.network_collection.__len__()): + n = self.network_collection[i] + investment_year = n.meta["wildcards"]["planning_horizons"] + results = [] + for variable in self.dsd.variable.to_pandas()["variable"]: + result = self._execute_function_for_variable(variable, n) + if result is not None: + results.append( + self._postprocess_statistics_result(variable, result) + ) + + if results: + year_df = pd.concat(results, ignore_index=False) + year_df.rename(columns={"value": str(investment_year)}, inplace=True) + container_investment_years.append(year_df) + if len(container_investment_years) > 0: + ds_with_values = container_investment_years[0] + if len(container_investment_years) > 1: + for year_df in container_investment_years[1:]: + ds_with_values = ds_with_values.merge( + year_df, on=["variable", "unit"], how="outer" + ) self.dsd_with_values = self.structure_pyam_from_pandas(ds_with_values) diff --git a/pypsa_validation_processing/configs/config.default.yaml b/pypsa_validation_processing/configs/config.default.yaml index 97e0f3b..a66c99c 100644 --- a/pypsa_validation_processing/configs/config.default.yaml +++ b/pypsa_validation_processing/configs/config.default.yaml @@ -1,10 +1,10 @@ # General section -country: # ISO 3166-1 alpha-2 country code, e.g. AT -definition_path: # path to the IAMC variable definitions folder +country: AT # ISO 3166-1 alpha-2 country code, e.g. 
AT +definitions_path: sister_packages/energy-scenarios-at-workflow/definitions # path to the IAMC variable definitions folder # mapping_path: # optional: path to mapping YAML; defaults to configs/mapping.default.yaml -output_path: # path the outputfile should be written to +output_path: resources # path the outputfile should be written to # Network -network_results_path: # path to the folder containing PyPSA network results -model_name: # name of the PyPSA model -scenario_name: # name of the PyPSA scenario +network_results_path: resources/AT_KN2040/ # path to the folder containing PyPSA network results +model_name: pypsa-at # name of the PyPSA model +scenario_name: KN2040test # name of the PyPSA scenario diff --git a/pypsa_validation_processing/configs/mapping.default.yaml b/pypsa_validation_processing/configs/mapping.default.yaml index 5f4511b..754603e 100644 --- a/pypsa_validation_processing/configs/mapping.default.yaml +++ b/pypsa_validation_processing/configs/mapping.default.yaml @@ -4,3 +4,6 @@ # # Example: # Primary Energy|Coal: primary_energy_coal + +Final Energy [by Carrier]|Electricity: Final_Energy_by_Carrier__Electricity +Final Energy [by Sector]|Transportation: Final_Energy_by_Sector__Transportation diff --git a/pypsa_validation_processing/statistics_functions.py b/pypsa_validation_processing/statistics_functions.py index fcc1a40..007cfe3 100644 --- a/pypsa_validation_processing/statistics_functions.py +++ b/pypsa_validation_processing/statistics_functions.py @@ -1,11 +1,116 @@ """Statistics functions for PyPSA validation processing. Each function in this module corresponds to one IAMC variable and extracts -the relevant value from a given PyPSA NetworkCollection. The functions are +the relevant value from a given PyPSA Network. The functions are looked up by name via the mapping defined in ``configs/mapping.default.yaml``. 
All functions share the same signature:: - def (pypsa.NetworkCollection) -> pyam.IamDataFrame: + def (network_collection: pypsa.Network) -> pd.Series: ... + +Each function returns a :class:`pandas.Series` with Multiindex, holding at +least the indexes ``variable`` and ``unit``. """ + +from __future__ import annotations + +import pandas as pd +import pypsa + + +def Final_Energy_by_Carrier__Electricity( + n: pypsa.Network, +) -> pd.DataFrame: + """Extract electricity final energy from a PyPSA NetworkCollection. + + Returns the total electricity consumption (excluding transmission / + distribution losses) across all networks in *network_collection*. + + Parameters + ---------- + network_collection : pypsa.NetworkCollection + Collection of PyPSA networks to process. + + Returns + ------- + pd.DataFrame + Long-format DataFrame with columns ``variable``, ``unit``, ``year``, + and ``value``. The ``variable`` column contains + ``"Final Energy [by Carrier]|Electricity"`` for every row. + + Notes + ----- + The actual extraction of electricity final energy from the network + collection will be implemented by the user. A typical call would be:: + + network_collection.statistics.energy_balance( + comps=["Load"], bus_carrier="AC" + ) + + The current implementation returns a dummy value of ``0.0 MWh`` for the + year 2020 so that the end-to-end workflow can be tested. + """ + # withdrawal from electricity including low_voltage + res = n.statistics.energy_balance( + bus_carrier="AC", groupby=["carrier", "country", "unit"], direction="withdrawal" + ) + # as battery is Store, discharger-link needs to be evaluated separately. 
+ res_storage = n.statistics.energy_balance( + bus_carrier="AC", + groupby=["carrier", "country", "unit"], + carrier=["battery discharger"], + ) + return pd.concat([res, res_storage], axis=0).groupby(["country", "unit"]).sum() + + +def Final_Energy_by_Sector__Transportation( + n: pypsa.Network, +) -> pd.DataFrame: + """Extract transportation-sector final energy from a PyPSA NetworkCollection. + + Returns the total energy consumed by the transportation sector (excluding + transmission / distribution losses) across all networks in + *network_collection*. + + Parameters + ---------- + network_collection : pypsa.NetworkCollection + Collection of PyPSA networks to process. + + Returns + ------- + pd.DataFrame + Long-format DataFrame with columns ``variable``, ``unit``, ``year``, + and ``value``. The ``variable`` column contains + ``"Final Energy [by Sector]|Transportation"`` for every row. + + Notes + ----- + The actual extraction of transportation final energy from the network + collection will be implemented by the user. A typical call would be:: + + network_collection.statistics.energy_balance( + comps=["Load"], carrier="transport" + ) + + The current implementation returns a dummy value of ``0.0 MWh`` for the + year 2020 so that the end-to-end workflow can be tested. + """ + # sum over all transportation-relevant sectors - 2 different units involved. 
+ result = ( + n.statistics.energy_balance( + carrier=[ + "land transport EV", + "land transport fuel cell", + "kerosene for aviation", + "shipping methanol", + ], + components="Load", + groupby=["carrier", "unit", "country"], + direction="withdrawal", + ) + .groupby(["country", "unit"]) + .sum() + ) + return result diff --git a/pypsa_validation_processing/utils.py b/pypsa_validation_processing/utils.py new file mode 100644 index 0000000..b50562c --- /dev/null +++ b/pypsa_validation_processing/utils.py @@ -0,0 +1,40 @@ +"""Static information and general utility functions for pypsa_validation_processing.""" + +EU27_COUNTRY_CODES: dict[str, str] = { + "AT": "Austria", + "BE": "Belgium", + "BG": "Bulgaria", + "CY": "Cyprus", + "CZ": "Czechia", + "DE": "Germany", + "DK": "Denmark", + "EE": "Estonia", + "ES": "Spain", + "FI": "Finland", + "FR": "France", + "GR": "Greece", + "HR": "Croatia", + "HU": "Hungary", + "IE": "Ireland", + "IT": "Italy", + "LT": "Lithuania", + "LU": "Luxembourg", + "LV": "Latvia", + "MT": "Malta", + "NL": "Netherlands", + "PL": "Poland", + "PT": "Portugal", + "RO": "Romania", + "SE": "Sweden", + "SI": "Slovenia", + "SK": "Slovakia", + "EU27_{year}": "EU27", +} + +UNITS_MAPPING = { + "MWh_el": "MWh", + "MWh_LHV": "MWh", + "MWh_th": "MWh", + "t_co2": "t", + "": "", +} diff --git a/tests/README.md b/tests/README.md index 1fe2857..cf28331 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,9 +1,195 @@ # Tests -Unit and integration tests for `pypsa_validation_processing`. +Comprehensive unit and integration tests for `pypsa_validation_processing`. 
-Run with: +## Running Tests + +Run all tests: ```bash pytest tests/ -v ``` + +Run tests for a specific module: + +```bash +pytest tests/test_statistics_functions.py -v +pytest tests/test_network_processor.py -v +pytest tests/test_workflow.py -v +pytest tests/test_utils.py -v +``` + +Run with coverage: + +```bash +pytest tests/ --cov=pypsa_validation_processing --cov-report=html +``` + +## Test Structure + +### `conftest.py` +Shared pytest fixtures and mock objects: +- **`MockStatisticsAccessor`**: Mock PyPSA statistics accessor that simulates `network.statistics.energy_balance()` calls +- **`MockPyPSANetwork`**: Minimal mock PyPSA Network with required interface for statistics functions +- **`MockNetworkCollection`**: Mock collection of PyPSA networks +- **Fixtures**: Reusable fixtures for mock networks and configuration files + +### `test_statistics_functions.py` +Tests for `pypsa_validation_processing/statistics_functions.py`: + +**`TestFinalEnergyByCarrierElectricity`**: +- Validates return type (DataFrame) +- Checks MultiIndex structure (country, unit levels) +- Verifies non-empty results +- Confirms numeric value types +- Tests with multiple networks + +**`TestFinalEnergyBySectorTransportation`**: +- Similar comprehensive coverage as electricity tests +- Validates sector-specific data extraction +- Tests multi-network processing + +### `test_network_processor.py` +Tests for `pypsa_validation_processing/class_definitions.py` (`Network_Processor` class): + +**`TestNetworkProcessorInit`**: +- Valid configuration initialization +- Validation of required config parameters +- `__repr__` method output + +**`TestNetworkProcessorConfigReading`**: +- YAML configuration file parsing +- Path validation and error handling + +**`TestNetworkProcessorFunctionExecution`**: +- Function lookup and execution +- Handling of missing functions + +**`TestNetworkProcessorOutputGeneration`**: +- Output file creation +- Error handling when no data available +- Excel file generation + 
+### `test_workflow.py` +Tests for `pypsa_validation_processing/workflow.py`: + +**`TestGetDefaultConfigPath`**: +- Default config path resolution +- Path existence and format validation + +**`TestResolveConfigPath`**: +- Configuration path resolution from CLI arguments +- Tilde expansion +- Path absolutization + +**`TestBuildParser`**: +- ArgumentParser creation and configuration +- CLI argument handling + +**`TestMainWorkflow`**: +- Main workflow execution +- Network_Processor integration + +**`TestCLIBehavior`**: +- Help message generation +- Error handling for invalid arguments + +### `test_utils.py` +Tests for `pypsa_validation_processing/utils.py`: + +**`TestEU27CountryCodes`**: +- Dictionary structure validation +- All 27 EU member states present +- Sample country code mappings +- EU27 aggregate key presence + +## Adding Tests for New Statistics Functions + +When you add a new function to `pypsa_validation_processing/statistics_functions.py`, follow these steps: + +### Step 1: Create the Function +Add your function to `pypsa_validation_processing/statistics_functions.py`: + +```python +def My_New_IAMC_Variable(n: pypsa.Network) -> pd.DataFrame: + """Extract IAMC variable from PyPSA Network. + + Parameters + ---------- + n : pypsa.Network + PyPSA Network to extract data from + + Returns + ------- + pd.DataFrame + DataFrame with MultiIndex including 'country' and 'unit' + """ + # Implementation here + result = n.statistics.energy_balance(...) 
+ return result.groupby(["country", "unit"]).sum() +``` + +### Step 2: Update the Mapping File +Add the function mapping to `pypsa_validation_processing/configs/mapping.default.yaml`: + +```yaml +My New IAMC Variable: My_New_IAMC_Variable +``` + +### Step 3: Add Test Class +In `tests/test_statistics_functions.py`, add a new test class for your function: + +```python +class TestMyNewIamcVariable: + """Test suite for My_New_IAMC_Variable function.""" + + def test_returns_dataframe(self, mock_network: MockPyPSANetwork): + """Test that the function returns a pandas DataFrame.""" + result = My_New_IAMC_Variable(mock_network) + assert isinstance(result, pd.DataFrame) + + def test_has_country_and_unit_index(self, mock_network: MockPyPSANetwork): + """Test that result has country and unit in the index.""" + result = My_New_IAMC_Variable(mock_network) + assert "country" in result.index.names + assert "unit" in result.index.names + + def test_not_empty(self, mock_network: MockPyPSANetwork): + """Test that result is not empty.""" + result = My_New_IAMC_Variable(mock_network) + assert len(result) > 0 + + def test_numeric_values(self, mock_network: MockPyPSANetwork): + """Test that result values are numeric.""" + result = My_New_IAMC_Variable(mock_network) + assert pd.api.types.is_numeric_dtype(result.dtype) + + def test_contains_austria(self, mock_network: MockPyPSANetwork): + """Test that result contains Austria (AT) data.""" + result = My_New_IAMC_Variable(mock_network) + assert "AT" in result.index.get_level_values("country") +``` + +### Step 4: Run Tests +Verify your new tests pass: + +```bash +pytest tests/test_statistics_functions.py::TestMyNewIamcVariable -v +``` + +## Test Coverage Goals + +- **statistics_functions.py**: 100% - Each function must have dedicated test class +- **Network_Processor class**: Unit tests for all public methods +- **workflow.py**: Tests for CLI argument parsing and main execution +- **utils.py**: Tests for constant definitions and mappings + 
+## Mock Objects + +The test suite uses mock PyPSA objects to avoid requiring actual network files: + +- **`MockStatisticsAccessor`**: Simulates `network.statistics.energy_balance()` with realistic MultiIndex structure +- **`MockPyPSANetwork`**: Minimal network with metadata and statistics interface +- **`MockNetworkCollection`**: Iterable collection of mock networks + +These mocks provide sufficient interface for testing without network file dependencies. diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..899a27c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,193 @@ +"""Shared test fixtures and configuration for pypsa_validation_processing tests.""" + +from __future__ import annotations + +from pathlib import Path +from unittest.mock import MagicMock + +import pandas as pd +import pytest + + +class MockStatisticsAccessor: + """Mock PyPSA Statistics accessor for testing. + + This mock provides the energy_balance method that returns realistic + pandas Series with MultiIndex structure matching PyPSA output. + """ + + def __init__(self, network_data: dict | None = None): + """Initialize with optional predefined network data.""" + self.network_data = network_data or {} + + def energy_balance( + self, + bus_carrier: str | None = None, + carrier: list[str] | str | None = None, + components: str | list[str] | None = None, + groupby: list[str] | None = None, + direction: str = "withdrawal", + ) -> pd.Series: + """Mock energy_balance method for PyPSA Network.statistics. + + Returns a pandas Series with MultiIndex including 'country' and 'unit' + to match the expected output structure. 
class MockPyPSANetwork:
    """Minimal mock PyPSA Network for unit testing.

    Provides the interface required by statistics_functions without
    needing actual network files.
    """

    def __init__(self, name: str = "test_network", **kwargs):
        """Initialize mock network.

        Parameters
        ----------
        name : str
            Name of the network
        **kwargs
            Additional attributes to set on the network
        """
        self.name = name
        # Shape mirrors the metadata this test-suite expects on solved
        # networks (planning horizon wildcard).
        self.meta = {
            "wildcards": {"planning_horizons": 2020},
        }
        self.statistics = MockStatisticsAccessor()

        # Attach any extra attributes requested by the test.
        for key, value in kwargs.items():
            setattr(self, key, value)


class MockNetworkCollection:
    """Mock PyPSA NetworkCollection for testing.

    Simulates a collection of networks that can be indexed and iterated.
    """

    def __init__(self, networks: list[MockPyPSANetwork] | None = None):
        """Initialize mock collection.

        Parameters
        ----------
        networks : list[MockPyPSANetwork] | None
            List of networks in the collection. If None, creates default networks.
        """
        # Explicit None check: an intentionally empty list must be kept,
        # not silently replaced by the defaults (the previous
        # `networks or [...]` treated [] as "use defaults").
        if networks is None:
            networks = [
                MockPyPSANetwork(name="network_2020"),
                MockPyPSANetwork(name="network_2030"),
            ]
        self.networks = networks

    def __len__(self) -> int:
        return len(self.networks)

    def __getitem__(self, index: int) -> MockPyPSANetwork:
        return self.networks[index]

    def __iter__(self):
        return iter(self.networks)


# =============================================================================
# Pytest Fixtures
# =============================================================================


@pytest.fixture
def mock_network() -> MockPyPSANetwork:
    """Fixture providing a single mock PyPSA Network."""
    return MockPyPSANetwork(name="test_network")


@pytest.fixture
def mock_network_collection() -> MockNetworkCollection:
    """Fixture providing a mock PyPSA NetworkCollection."""
    return MockNetworkCollection()


@pytest.fixture
def mock_pypsa_network_with_metadata() -> MockPyPSANetwork:
    """Fixture providing a mock network with realistic metadata."""
    network = MockPyPSANetwork(name="AT_KN2040_test")
    network.meta = {
        "wildcards": {
            "planning_horizons": 2030,
            "sector_opts": "355H",
        },
        "scenarios": ["test_scenario"],
    }
    return network


@pytest.fixture
def temp_config_file(tmp_path: Path) -> Path:
    """Fixture providing a temporary config file for testing."""
    config_content = """
country: AT
model_name: AT_KN2040
scenario_name: test
definitions_path: /tmp/definitions
network_results_path: /tmp/network_results
"""
    config_file = tmp_path / "config.yaml"
    config_file.write_text(config_content)
    return config_file


# ===========================================================================
# tests/test_network_processor.py
# ===========================================================================
"""Tests for pypsa_validation_processing.class_definitions.Network_Processor."""

from pathlib import Path
from unittest.mock import MagicMock, patch

import pandas as pd
import pytest

from conftest import MockNetworkCollection, MockPyPSANetwork
from pypsa_validation_processing.class_definitions import Network_Processor


# ---------------------------------------------------------------------------
# Fixtures
# ---------------------------------------------------------------------------


@pytest.fixture
def mock_config_dict() -> dict:
    """Fixture providing a mock configuration dictionary."""
    return {
        "country": "AT",
        "model_name": "AT_KN2040",
        "scenario_name": "test",
        "definitions_path": "/tmp/definitions",
        "network_results_path": "/tmp/network_results",
    }


@pytest.fixture
def mock_definitions_path(tmp_path: Path) -> Path:
    """Fixture providing a mock definitions directory."""
    defs_path = tmp_path / "definitions"
    defs_path.mkdir()

    # Create a minimal definitions file (CSV format expected by nomenclature
    # -- TODO(review): confirm nomenclature accepts CSV here, not YAML).
    (defs_path / "variables.csv").write_text(
        "Variable\n"
        "Final Energy [by Carrier]|Electricity\n"
        "Final Energy [by Sector]|Transportation\n"
    )
    return defs_path


@pytest.fixture
def mock_network_results_path(tmp_path: Path) -> Path:
    """Fixture providing a mock network results directory with networks."""
    nw_path = tmp_path / "networks"
    nw_path.mkdir(parents=True, exist_ok=True)

    # Create dummy (empty) network files, one per planning horizon.
    for year in (2020, 2030):
        (nw_path / f"base_s_adm__none_{year}.nc").touch()
    # Return the parent so the config points at the directory that
    # CONTAINS the "networks" folder.
    return nw_path.parent


@pytest.fixture
def mock_config_file(
    tmp_path: Path, mock_definitions_path: Path, mock_network_results_path: Path
) -> Path:
    """Fixture providing a temporary config file for testing."""
    config_content = f"""
country: AT
model_name: AT_KN2040
scenario_name: test_scenario
definitions_path: {mock_definitions_path}
network_results_path: {mock_network_results_path}
output_path: {tmp_path / 'output.xlsx'}
"""
    config_file = tmp_path / "config.yaml"
    config_file.write_text(config_content)
    return config_file


# ---------------------------------------------------------------------------
# Tests for Network_Processor initialization
# ---------------------------------------------------------------------------


class TestNetworkProcessorInit:
    """Test Network_Processor initialization and configuration loading."""

    def test_init_with_valid_config(self, mock_config_file: Path):
        """Test initialization with a valid configuration file."""
        with patch(
            "pypsa_validation_processing.class_definitions.pypsa.NetworkCollection"
        ), patch(
            "pypsa_validation_processing.class_definitions.nomenclature.DataStructureDefinition"
        ):
            processor = Network_Processor(config_path=mock_config_file)
            assert processor.country == "AT"
            assert processor.model_name == "AT_KN2040"
            assert processor.scenario_name == "test_scenario"

    def test_init_missing_required_config(self, tmp_path: Path):
        """Test initialization fails with missing required config keys."""
        config_file = tmp_path / "bad_config.yaml"
        config_file.write_text("model_name: test\n")  # Missing required keys

        with pytest.raises(ValueError):
            Network_Processor(config_path=config_file)

    def test_repr_method(self, mock_config_file: Path):
        """Test the __repr__ method returns informative string."""
        with patch(
            "pypsa_validation_processing.class_definitions.pypsa.NetworkCollection"
        ), patch(
            "pypsa_validation_processing.class_definitions.nomenclature.DataStructureDefinition"
        ):
            processor = Network_Processor(config_path=mock_config_file)
            repr_str = repr(processor)
            assert "Network_Processor" in repr_str
            assert "AT" in repr_str


# ---------------------------------------------------------------------------
# Tests for configuration reading
# ---------------------------------------------------------------------------


class TestNetworkProcessorConfigReading:
    """Test configuration file reading functionality."""

    def test_read_config(self, mock_config_file: Path):
        """Test that config is read correctly from YAML."""
        with patch(
            "pypsa_validation_processing.class_definitions.pypsa.NetworkCollection"
        ), patch(
            "pypsa_validation_processing.class_definitions.nomenclature.DataStructureDefinition"
        ):
            processor = Network_Processor(config_path=mock_config_file)
            config = processor.config
            assert isinstance(config, dict)
            assert "country" in config
            assert "model_name" in config

    def test_config_path_validation(self, tmp_path: Path):
        """Test that nonexistent config file raises error."""
        nonexistent_config = tmp_path / "nonexistent.yaml"
        with pytest.raises(FileNotFoundError):
            Network_Processor(config_path=nonexistent_config)


# ---------------------------------------------------------------------------
# Tests for function execution
# ---------------------------------------------------------------------------


class TestNetworkProcessorFunctionExecution:
    """Test function lookup and execution."""

    def test_execute_function_for_variable_returns_series(self, mock_config_file: Path):
        """Test that _execute_function_for_variable returns a Series."""
        with patch(
            "pypsa_validation_processing.class_definitions.pypsa.NetworkCollection"
        ), patch(
            "pypsa_validation_processing.class_definitions.nomenclature.DataStructureDefinition"
        ):
            processor = Network_Processor(config_path=mock_config_file)
            processor.functions_dict = {
                "Final Energy [by Carrier]|Electricity": "Final_Energy_by_Carrier__Electricity"
            }

            mock_network = MockPyPSANetwork()
            result = processor._execute_function_for_variable(
                "Final Energy [by Carrier]|Electricity", mock_network
            )

            assert isinstance(result, (pd.DataFrame, pd.Series)) or result is None

    def test_execute_function_not_found(self, mock_config_file: Path):
        """Test that function returns None when not found."""
        with patch(
            "pypsa_validation_processing.class_definitions.pypsa.NetworkCollection"
        ), patch(
            "pypsa_validation_processing.class_definitions.nomenclature.DataStructureDefinition"
        ):
            processor = Network_Processor(config_path=mock_config_file)
            processor.functions_dict = {}

            mock_network = MockPyPSANetwork()
            result = processor._execute_function_for_variable(
                "Nonexistent Variable", mock_network
            )
            assert result is None


# ---------------------------------------------------------------------------
# Tests for output generation
# ---------------------------------------------------------------------------


class TestNetworkProcessorOutputGeneration:
    """Test output file generation."""

    def test_write_output_raises_without_data(self, mock_config_file: Path):
        """Test that write_output raises error if no data has been calculated."""
        with patch(
            "pypsa_validation_processing.class_definitions.pypsa.NetworkCollection"
        ), patch(
            "pypsa_validation_processing.class_definitions.nomenclature.DataStructureDefinition"
        ):
            processor = Network_Processor(config_path=mock_config_file)
            with pytest.raises(RuntimeError, match="No data available"):
                processor.write_output_to_xlsx()

    def test_write_output_creates_file(self, mock_config_file: Path, tmp_path: Path):
        """Test that write_output creates an Excel file."""
        with patch(
            "pypsa_validation_processing.class_definitions.pypsa.NetworkCollection"
        ), patch(
            "pypsa_validation_processing.class_definitions.nomenclature.DataStructureDefinition"
        ):
            processor = Network_Processor(config_path=mock_config_file)

            # Mock the dsd_with_values so no real calculation is required.
            mock_iam_df = MagicMock()
            processor.dsd_with_values = mock_iam_df

            output_path = tmp_path / "test_output.xlsx"
            result_path = processor.write_output_to_xlsx(output_path=output_path)

            assert result_path == output_path
            # Verify to_excel was called
            mock_iam_df.to_excel.assert_called_once()


# ===========================================================================
# tests/test_statistics_functions.py
# ===========================================================================
"""Tests for pypsa_validation_processing.statistics_functions."""

import pandas as pd

from conftest import MockNetworkCollection, MockPyPSANetwork
from pypsa_validation_processing.statistics_functions import (
    Final_Energy_by_Carrier__Electricity,
    Final_Energy_by_Sector__Transportation,
)


# ---------------------------------------------------------------------------
# Tests for Final_Energy_by_Carrier__Electricity
# ---------------------------------------------------------------------------


class TestFinalEnergyByCarrierElectricity:
    """Test suite for Final_Energy_by_Carrier__Electricity function."""

    def test_returns_dataframe(self, mock_network: MockPyPSANetwork):
        """Test that the function returns a pandas DataFrame or Series."""
        result = Final_Energy_by_Carrier__Electricity(mock_network)
        assert isinstance(result, (pd.DataFrame, pd.Series))

    def test_has_country_and_unit_index(self, mock_network: MockPyPSANetwork):
        """Test that result has country and unit in the index."""
        result = Final_Energy_by_Carrier__Electricity(mock_network)
        assert "country" in result.index.names
        assert "unit" in result.index.names

    def test_not_empty(self, mock_network: MockPyPSANetwork):
        """Test that result is not empty."""
        result = Final_Energy_by_Carrier__Electricity(mock_network)
        assert len(result) > 0

    def test_numeric_values(self, mock_network: MockPyPSANetwork):
        """Test that result values are numeric."""
        result = Final_Energy_by_Carrier__Electricity(mock_network)
        # is_numeric_dtype already covers float and int dtypes.
        assert pd.api.types.is_numeric_dtype(result.dtype)

    def test_contains_austria(self, mock_network: MockPyPSANetwork):
        """Test that result contains Austria (AT) data."""
        result = Final_Energy_by_Carrier__Electricity(mock_network)
        assert "AT" in result.index.get_level_values("country")

    def test_multiple_networks(self, mock_network_collection: MockNetworkCollection):
        """Test processing multiple networks from collection."""
        for network in mock_network_collection:
            result = Final_Energy_by_Carrier__Electricity(network)
            assert isinstance(result, (pd.DataFrame, pd.Series))
            assert len(result) > 0


# ---------------------------------------------------------------------------
# Tests for Final_Energy_by_Sector__Transportation
# ---------------------------------------------------------------------------


class TestFinalEnergyBySectorTransportation:
    """Test suite for Final_Energy_by_Sector__Transportation function."""

    def test_returns_dataframe(self, mock_network: MockPyPSANetwork):
        """Test that the function returns a pandas DataFrame or Series."""
        result = Final_Energy_by_Sector__Transportation(mock_network)
        assert isinstance(result, (pd.DataFrame, pd.Series))

    def test_has_country_and_unit_index(self, mock_network: MockPyPSANetwork):
        """Test that result has country and unit in the index."""
        result = Final_Energy_by_Sector__Transportation(mock_network)
        assert "country" in result.index.names
        assert "unit" in result.index.names

    def test_not_empty(self, mock_network: MockPyPSANetwork):
        """Test that result is not empty."""
        result = Final_Energy_by_Sector__Transportation(mock_network)
        assert len(result) > 0

    def test_numeric_values(self, mock_network: MockPyPSANetwork):
        """Test that result values are numeric."""
        result = Final_Energy_by_Sector__Transportation(mock_network)
        # is_numeric_dtype already covers float and int dtypes.
        assert pd.api.types.is_numeric_dtype(result.dtype)

    def test_contains_austria(self, mock_network: MockPyPSANetwork):
        """Test that result contains Austria (AT) data."""
        result = Final_Energy_by_Sector__Transportation(mock_network)
        assert "AT" in result.index.get_level_values("country")

    def test_multiple_networks(self, mock_network_collection: MockNetworkCollection):
        """Test processing multiple networks from collection."""
        for network in mock_network_collection:
            result = Final_Energy_by_Sector__Transportation(network)
            assert isinstance(result, (pd.DataFrame, pd.Series))
            assert len(result) > 0


# ===========================================================================
# tests/test_utils.py
# ===========================================================================
"""Tests for pypsa_validation_processing.utils."""

from pypsa_validation_processing.utils import EU27_COUNTRY_CODES


class TestEU27CountryCodes:
    """Sanity checks on the EU27 country-code mapping."""

    def test_is_dict(self):
        assert isinstance(EU27_COUNTRY_CODES, dict)

    def test_has_all_27_member_states(self):
        # All 27 EU member state ISO codes must be present
        expected_codes = {
            "AT", "BE", "BG", "CY", "CZ", "DE", "DK", "EE", "ES", "FI",
            "FR", "GR", "HR", "HU", "IE", "IT", "LT", "LU", "LV", "MT",
            "NL", "PL", "PT", "RO", "SE", "SI", "SK",
        }
        assert expected_codes.issubset(EU27_COUNTRY_CODES.keys())

    def test_sample_mappings(self):
        assert EU27_COUNTRY_CODES["AT"] == "Austria"
        assert EU27_COUNTRY_CODES["DE"] == "Germany"
        assert EU27_COUNTRY_CODES["FR"] == "France"

    def test_eu27_aggregate_key(self):
        # The aggregate entry is a literal "EU27_{year}" template key.
        assert "EU27_{year}" in EU27_COUNTRY_CODES
        assert EU27_COUNTRY_CODES["EU27_{year}"] == "EU27"

    def test_values_are_strings(self):
        for key, value in EU27_COUNTRY_CODES.items():
            assert isinstance(key, str), f"Key {key!r} is not a string"
            assert isinstance(value, str), f"Value {value!r} for key {key!r} is not a string"


# ===========================================================================
# tests/test_workflow.py
# ===========================================================================
"""Tests for pypsa_validation_processing.workflow module."""

import sys
from pathlib import Path
from unittest.mock import MagicMock, patch

import pytest

from pypsa_validation_processing.workflow import (
    build_parser,
    get_default_config_path,
    resolve_config_path,
)


# ---------------------------------------------------------------------------
# Tests for get_default_config_path
# ---------------------------------------------------------------------------


class TestGetDefaultConfigPath:
    """Test get_default_config_path function."""

    def test_returns_path_object(self):
        """Test that function returns a Path object."""
        assert isinstance(get_default_config_path(), Path)

    def test_path_exists(self):
        """Test that returned path exists."""
        assert get_default_config_path().exists()

    def test_path_is_yaml_file(self):
        """Test that returned path is a YAML file."""
        assert get_default_config_path().suffix == ".yaml"

    def test_path_has_config_in_name(self):
        """Test that returned path contains 'config' in filename."""
        assert "config" in get_default_config_path().name.lower()


# ---------------------------------------------------------------------------
# Tests for resolve_config_path
# ---------------------------------------------------------------------------
class TestResolveConfigPath:
    """Test resolve_config_path function."""

    def test_resolve_with_none_returns_default(self):
        """Test that None argument returns default config path."""
        assert resolve_config_path(None) == get_default_config_path()

    def test_resolve_with_absolute_path(self, tmp_path: Path):
        """Test that absolute path is resolved correctly."""
        test_config = tmp_path / "custom_config.yaml"
        test_config.write_text("test: data")

        assert resolve_config_path(str(test_config)) == test_config

    def test_resolve_expands_tilde(self):
        """Test that ~ in path is expanded."""
        # Actual tilde expansion target depends on the system; only check
        # that the "~" itself no longer appears in the resolved path.
        result = resolve_config_path("~/test_config.yaml")
        assert "~" not in str(result)

    def test_resolve_returns_absolute_path(self, tmp_path: Path):
        """Test that returned path is absolute."""
        test_config = tmp_path / "config.yaml"
        test_config.write_text("test: data")

        assert resolve_config_path(str(test_config)).is_absolute()


# ---------------------------------------------------------------------------
# Tests for build_parser
# ---------------------------------------------------------------------------


class TestBuildParser:
    """Test build_parser function."""

    def test_returns_argument_parser(self):
        """Test that function returns an ArgumentParser."""
        from argparse import ArgumentParser

        assert isinstance(build_parser(), ArgumentParser)

    def test_parser_has_config_argument(self):
        """Test that parser has --config argument."""
        # Parse with --config to verify it's accepted
        args = build_parser().parse_args(["--config", "test.yaml"])
        assert args.config == "test.yaml"

    def test_parser_config_defaults_to_none(self):
        """Test that --config defaults to None."""
        args = build_parser().parse_args([])
        assert args.config is None

    def test_parser_accepts_config_path(self):
        """Test that parser accepts a config file path."""
        args = build_parser().parse_args(["--config", "/path/to/config.yaml"])
        assert args.config == "/path/to/config.yaml"


# ---------------------------------------------------------------------------
# Tests for main workflow
# ---------------------------------------------------------------------------


class TestMainWorkflow:
    """Test the main workflow execution."""

    def test_main_execution(self):
        """Test that main() drives Network_Processor from CLI arguments."""
        # Import here so the workflow module is loaded before patching it.
        from pypsa_validation_processing.workflow import main

        mock_processor = MagicMock()

        # One flat patch stack: previously Network_Processor was patched
        # twice (decorator + inner `with`) and workflow.Path was patched
        # to no effect.
        with patch.object(
            sys, "argv", ["workflow.py", "--config", "test.yaml"]
        ), patch(
            "pypsa_validation_processing.workflow.resolve_config_path",
            return_value=Path("test.yaml"),
        ), patch(
            "pypsa_validation_processing.workflow.Network_Processor",
            return_value=mock_processor,
        ):
            try:
                main()
            except (SystemExit, FileNotFoundError):
                # Expected if config doesn't exist
                pass


# ---------------------------------------------------------------------------
# Tests for CLI behavior
# ---------------------------------------------------------------------------


class TestCLIBehavior:
    """Test command-line interface behavior."""

    def test_help_message(self):
        """Test that --help produces valid output."""
        help_text = build_parser().format_help()
        assert "config" in help_text.lower()
        assert "PyPSA" in help_text or "IAMC" in help_text

    def test_unknown_argument_raises_error(self):
        """Test that unknown arguments raise an error."""
        # argparse reports unknown options via SystemExit, not
        # argparse.ArgumentError, so that (previously unused) import is gone.
        parser = build_parser()
        with pytest.raises(SystemExit):
            parser.parse_args(["--unknown-arg", "value"])