diff --git a/.gitignore b/.gitignore index 1b9d52962e..1d6cefba48 100644 --- a/.gitignore +++ b/.gitignore @@ -111,5 +111,6 @@ doc/_sidebar.rst.inc *.ESMF_LogFile -#Ignore vscode AI rules +# AI .github/instructions/codacy.instructions.md +CLAUDE.md diff --git a/esmvalcore/_recipe/from_datasets.py b/esmvalcore/_recipe/from_datasets.py index 996c81b37a..04cd31cb17 100644 --- a/esmvalcore/_recipe/from_datasets.py +++ b/esmvalcore/_recipe/from_datasets.py @@ -67,9 +67,7 @@ def _datasets_to_recipe(datasets: Iterable[Dataset]) -> Recipe: f"'diagnostic' facet missing from {dataset}," "unable to convert to recipe." ) - raise RecipeError( - msg, - ) + raise RecipeError(msg) recipe = _datasets_to_raw_recipe(datasets) diagnostics = recipe["diagnostics"].values() diff --git a/esmvalcore/_task.py b/esmvalcore/_task.py index 11178df503..69316038c0 100644 --- a/esmvalcore/_task.py +++ b/esmvalcore/_task.py @@ -177,16 +177,12 @@ def _py2ncl(value, var_name=""): type_ = type(value[0]) if any(not isinstance(v, type_) for v in value): msg = f"NCL array cannot be mixed type: {value}" - raise ValueError( - msg, - ) + raise ValueError(msg) txt += "(/{}/)".format(", ".join(_py2ncl(v) for v in value)) elif isinstance(value, dict): if not var_name: msg = f"NCL does not support nested dicts: {value}" - raise ValueError( - msg, - ) + raise ValueError(msg) txt += "True\n" for key in value: txt += f"{var_name}@{key} = {_py2ncl(value[key])}\n" @@ -272,9 +268,7 @@ def initialize_provenance(self, recipe_entity): """Initialize task provenance activity.""" if self.activity is not None: msg = f"Provenance of {self} already initialized" - raise ValueError( - msg, - ) + raise ValueError(msg) self.activity = get_task_provenance(self, recipe_entity) def flatten(self): @@ -445,18 +439,14 @@ def _initialize_cmd(self): msg = ( f"{err_msg}: program '{interpreters[ext]}' not installed." 
) - raise DiagnosticError( - msg, - ) + raise DiagnosticError(msg) cmd.append(interpreter) elif not os.access(script_file, os.X_OK): msg = ( f"{err_msg}: non-executable file with unknown extension " f"'{script_file.suffix}'." ) - raise DiagnosticError( - msg, - ) + raise DiagnosticError(msg) cmd.extend(args.get(ext, [])) cmd.append(str(script_file)) @@ -674,9 +664,7 @@ def _run(self, input_files): f"Diagnostic script {self.script} failed with return code {returncode}. See the log " f"in {self.log}" ) - raise DiagnosticError( - msg, - ) + raise DiagnosticError(msg) def _collect_provenance(self) -> None: """Process provenance information provided by the diagnostic script.""" diff --git a/esmvalcore/_version.py b/esmvalcore/_version.py index 394a7a7e23..6b9736a89c 100644 --- a/esmvalcore/_version.py +++ b/esmvalcore/_version.py @@ -9,6 +9,4 @@ "ESMValCore package not found, please run `pip install -e .` before " "importing the package." ) - raise PackageNotFoundError( - msg, - ) from exc + raise PackageNotFoundError(msg) from exc diff --git a/esmvalcore/cmor/_fixes/access/_base_fix.py b/esmvalcore/cmor/_fixes/access/_base_fix.py index bb8b2dfea7..5b7a122a88 100644 --- a/esmvalcore/cmor/_fixes/access/_base_fix.py +++ b/esmvalcore/cmor/_fixes/access/_base_fix.py @@ -75,9 +75,7 @@ def _get_path_from_facet(self, facet): path = Path(self.extra_facets[facet]) if not path.is_file(): msg = f"'{path}' given by facet '{facet}' does not exist" - raise FileNotFoundError( - msg, - ) + raise FileNotFoundError(msg) return path def load_ocean_grid_data(self, facet): diff --git a/esmvalcore/cmor/_fixes/cordex/cordex_fixes.py b/esmvalcore/cmor/_fixes/cordex/cordex_fixes.py index d4bbcdf8cf..ed0d76190a 100644 --- a/esmvalcore/cmor/_fixes/cordex/cordex_fixes.py +++ b/esmvalcore/cmor/_fixes/cordex/cordex_fixes.py @@ -131,9 +131,7 @@ def _check_grid_differences(self, old_coord, new_coord): "Differences between the original grid and the " f"standardised grid are above 10e-4 
{new_coord.units}." ) - raise RecipeError( - msg, - ) + raise RecipeError(msg) def _fix_rotated_coords(self, cube, domain, domain_info): """Fix rotated coordinates.""" @@ -221,8 +219,6 @@ def fix_metadata(self, cubes): "not supported in CORDEX datasets. Must be " "rotated_latitude_longitude or lambert_conformal_conic." ) - raise RecipeError( - msg, - ) + raise RecipeError(msg) return cubes diff --git a/esmvalcore/cmor/_fixes/emac/_base_fixes.py b/esmvalcore/cmor/_fixes/emac/_base_fixes.py index 7053b08fab..25a4f4c2dc 100644 --- a/esmvalcore/cmor/_fixes/emac/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/emac/_base_fixes.py @@ -40,9 +40,7 @@ def get_cube(self, cubes, var_name=None): f"available in the input file. Please specify a valid `raw_name` " f"in the recipe or as extra facets." ) - raise ValueError( - msg, - ) + raise ValueError(msg) class NegateData(EmacFix): diff --git a/esmvalcore/cmor/_fixes/emac/emac.py b/esmvalcore/cmor/_fixes/emac/emac.py index 911bd46b13..6ff2e7e45e 100644 --- a/esmvalcore/cmor/_fixes/emac/emac.py +++ b/esmvalcore/cmor/_fixes/emac/emac.py @@ -117,9 +117,7 @@ def _fix_plev(self, cube): f"'{self.vardef.short_name}', searched for Z-coordinates with " f"units that are convertible to Pa" ) - raise ValueError( - msg, - ) + raise ValueError(msg) @staticmethod def _fix_alevel(cube, cubes): diff --git a/esmvalcore/cmor/_fixes/icon/_base_fixes.py b/esmvalcore/cmor/_fixes/icon/_base_fixes.py index 124b56de5d..4082f96bc5 100644 --- a/esmvalcore/cmor/_fixes/icon/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/icon/_base_fixes.py @@ -162,9 +162,7 @@ def _get_grid_url(self, cube: Cube) -> tuple[str, str]: f"necessary to download the ICON horizontal grid file:\n" f"{cube}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) grid_url = cube.attributes[self.GRID_FILE_ATTR] parsed_url = urlparse(grid_url) grid_name = Path(parsed_url.path).name @@ -223,9 +221,7 @@ def _get_path_from_facet( f"relative to the auxiliary_data_dir " 
f"'{self.session['auxiliary_data_dir']}')" ) - raise FileNotFoundError( - msg, - ) + raise FileNotFoundError(msg) path = new_path return path @@ -650,9 +646,7 @@ def _add_coord_from_grid_file(self, cube: Cube, coord_name: str) -> None: f"'{coord_name}', cube does not contain a single unnamed " f"dimension:\n{cube}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) coord_dims: tuple[()] | tuple[int] = () for idx in range(cube.ndim): if not cube.coords(dimensions=idx, dim_coords=True): @@ -678,9 +672,7 @@ def _add_time(self, cube: Cube, cubes: CubeList) -> Cube: f"'{self.vardef.short_name}', cube and other cubes in file do not " f"contain it" ) - raise ValueError( - msg, - ) + raise ValueError(msg) def _get_z_coord( self, @@ -987,9 +979,7 @@ def _get_previous_timestep(self, datetime_point: datetime) -> datetime: f"got {datetime_point}. Use `shift_time=false` in the " f"recipe to disable this feature" ) - raise ValueError( - msg, - ) + raise ValueError(msg) # Decadal data if "dec" in freq: @@ -1082,9 +1072,7 @@ def _fix_invalid_time_units(time_coord: Coord) -> None: f"Expected time units '{time_format}' in input file, got " f"'{t_unit}'" ) - raise ValueError( - msg, - ) + raise ValueError(msg) new_t_units = Unit( "days since 1850-01-01", calendar="proleptic_gregorian", diff --git a/esmvalcore/cmor/_fixes/native_datasets.py b/esmvalcore/cmor/_fixes/native_datasets.py index 2f7f24b763..a4974c80c3 100644 --- a/esmvalcore/cmor/_fixes/native_datasets.py +++ b/esmvalcore/cmor/_fixes/native_datasets.py @@ -86,9 +86,7 @@ def fix_var_metadata(self, cube: Cube) -> None: f"Failed to fix invalid units '{invalid_units}' for " f"variable '{self.vardef.short_name}'" ) - raise ValueError( - msg, - ) from exc + raise ValueError(msg) from exc safe_convert_units(cube, self.vardef.units) # Fix attributes @@ -132,9 +130,7 @@ def get_cube( f"Variable '{var_name}' used to extract " f"'{self.vardef.short_name}' is not available in input file" ) - raise ValueError( - msg, - ) + raise 
ValueError(msg) return cubes.extract_cube(NameConstraint(var_name=var_name)) def fix_regular_time( diff --git a/esmvalcore/cmor/_fixes/shared.py b/esmvalcore/cmor/_fixes/shared.py index 13d478ea9f..713cb7e12c 100644 --- a/esmvalcore/cmor/_fixes/shared.py +++ b/esmvalcore/cmor/_fixes/shared.py @@ -52,9 +52,7 @@ def add_aux_coords_from_cubes(cube, cubes, coord_dict): f"Expected exactly one coordinate cube '{coord_name}' in " f"list of cubes {cubes}, got {len(coord_cube):d}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) coord_cube = coord_cube[0] aux_coord = cube_to_aux_coord(coord_cube) cube.add_aux_coord(aux_coord, coord_dims) @@ -137,9 +135,7 @@ def add_plev_from_altitude(cube): "Cannot add 'air_pressure' coordinate, 'altitude' coordinate not " "available" ) - raise ValueError( - msg, - ) + raise ValueError(msg) def add_altitude_from_plev(cube): @@ -185,9 +181,7 @@ def add_altitude_from_plev(cube): "Cannot add 'altitude' coordinate, 'air_pressure' coordinate not " "available" ) - raise ValueError( - msg, - ) + raise ValueError(msg) def add_scalar_depth_coord(cube, depth=0.0): @@ -355,16 +349,12 @@ def get_bounds_cube(cubes, coord_var_name): return cube[0] if len(cube) > 1: msg = f"Multiple cubes with var_name '{bound_var}' found" - raise ValueError( - msg, - ) + raise ValueError(msg) msg = ( f"No bounds for coordinate variable '{coord_var_name}' available in " f"cubes\n{cubes}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) @cache @@ -544,16 +534,12 @@ def get_time_bounds(time: Coord, freq: str) -> np.ndarray: f"For `n`-hourly data, `n` must be a divisor of 24, got " f"'{freq}'" ) - raise NotImplementedError( - msg, - ) + raise NotImplementedError(msg) min_bound = date - timedelta(hours=n_hours / 2.0) max_bound = date + timedelta(hours=n_hours / 2.0) else: msg = f"Cannot guess time bounds for frequency '{freq}'" - raise NotImplementedError( - msg, - ) + raise NotImplementedError(msg) bounds.append([min_bound, max_bound]) return 
date2num(np.array(bounds), time.units, time.dtype) diff --git a/esmvalcore/cmor/_utils.py b/esmvalcore/cmor/_utils.py index 9728f8bcad..e2d33b762d 100644 --- a/esmvalcore/cmor/_utils.py +++ b/esmvalcore/cmor/_utils.py @@ -79,9 +79,7 @@ def _get_alternative_generic_lev_coord( f"Found no valid alternative coordinate for generic level coordinate " f"'{coord_name}'" ) - raise ValueError( - msg, - ) + raise ValueError(msg) def _get_generic_lev_coord_names( @@ -204,9 +202,7 @@ def _get_single_cube( f"but none of their var_names match the expected.\nFull list of " f"cubes encountered: {cube_list}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) logger.warning( "Found variable %s%s, but there were other present in the file. Those " "extra variables are usually metadata (cell area, latitude " diff --git a/esmvalcore/experimental/recipe_metadata.py b/esmvalcore/experimental/recipe_metadata.py index f674d33d49..6c6ee2543f 100644 --- a/esmvalcore/experimental/recipe_metadata.py +++ b/esmvalcore/experimental/recipe_metadata.py @@ -143,9 +143,7 @@ def __init__(self, filename: str) -> None: f"{self.__class__.__name__} cannot handle bibtex files " "with more than 1 entry." ) - raise NotImplementedError( - msg, - ) + raise NotImplementedError(msg) self._bib_data = bib_data self._key, self._entry = next(iter(bib_data.entries.items())) @@ -202,8 +200,6 @@ def render(self, renderer: str = "html") -> str: rendered = formatter.text.render(backend) except Exception as err: # noqa: BLE001 msg = f"Could not render {self._key!r}: {err}" - raise RenderError( - msg, - ) from None + raise RenderError(msg) from None return rendered diff --git a/esmvalcore/io/esgf/_download.py b/esmvalcore/io/esgf/_download.py index abcb67ccf5..23a8e0fd69 100644 --- a/esmvalcore/io/esgf/_download.py +++ b/esmvalcore/io/esgf/_download.py @@ -551,9 +551,7 @@ def _download(self, local_file, url): f" downloaded from {url}: expected {checksum}, but got" f" {local_checksum}. Try downloading the file again." 
) - raise DownloadError( - msg, - ) + raise DownloadError(msg) shutil.move(tmp_file, local_file) log_speed(url, self.size, duration.total_seconds()) diff --git a/esmvalcore/io/esgf/_search.py b/esmvalcore/io/esgf/_search.py index 01e42a0bb4..d1e7358e77 100644 --- a/esmvalcore/io/esgf/_search.py +++ b/esmvalcore/io/esgf/_search.py @@ -347,9 +347,7 @@ def find_files(*, project, short_name, dataset, **facets): f"Unable to download from ESGF, because project {project} is not" " on it or is not supported by the esmvalcore.io.esgf module." ) - raise ValueError( - msg, - ) + raise ValueError(msg) # The project is required for the function to work. facets["project"] = project diff --git a/esmvalcore/iris_helpers.py b/esmvalcore/iris_helpers.py index dfba3ca503..18db216a09 100644 --- a/esmvalcore/iris_helpers.py +++ b/esmvalcore/iris_helpers.py @@ -522,9 +522,7 @@ def safe_convert_units(cube: Cube, units: str | Unit) -> Cube: f"standard_name changed from '{old_standard_name}' to " f"'{cube.standard_name}'" ) - raise ValueError( - msg, - ) + raise ValueError(msg) return cube @@ -624,9 +622,7 @@ def dataset_to_iris( f"Expected type ncdata.NcData or xr.Dataset for dataset, got " f"type {type(dataset)}" ) - raise TypeError( - msg, - ) + raise TypeError(msg) with ignore_warnings_context(ignore_warnings): cubes = conversion_func(dataset) diff --git a/esmvalcore/preprocessor/_area.py b/esmvalcore/preprocessor/_area.py index 0c7e08ff2a..34ceacad0c 100644 --- a/esmvalcore/preprocessor/_area.py +++ b/esmvalcore/preprocessor/_area.py @@ -250,9 +250,7 @@ def zonal_statistics( """ if cube.coord("longitude").points.ndim >= 2: msg = "Zonal statistics on irregular grids not yet implemented" - raise ValueError( - msg, - ) + raise ValueError(msg) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) with ignore_iris_vague_metadata_warnings(): result = cube.collapsed("longitude", agg, **agg_kwargs) @@ -301,9 +299,7 @@ def meridional_statistics( """ if 
cube.coord("latitude").points.ndim >= 2: msg = "Meridional statistics on irregular grids not yet implemented" - raise ValueError( - msg, - ) + raise ValueError(msg) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) with ignore_iris_vague_metadata_warnings(): result = cube.collapsed("latitude", agg, **agg_kwargs) @@ -421,17 +417,13 @@ def extract_named_regions(cube: Cube, regions: str | Iterable[str]) -> Cube: if not isinstance(regions, (list, tuple, set)): msg = f'Regions "{regions}" is not an acceptable format.' - raise TypeError( - msg, - ) + raise TypeError(msg) available_regions = set(cube.coord("region").points) invalid_regions = set(regions) - available_regions if invalid_regions: msg = f'Region(s) "{invalid_regions}" not in cube region(s): {available_regions}' - raise ValueError( - msg, - ) + raise ValueError(msg) constraints = iris.Constraint(region=lambda r: r in regions) return cube.extract(constraint=constraints) @@ -610,11 +602,8 @@ def _get_masks_from_geometries( ) -> dict[str, np.ndarray]: """Get cube masks from requested regions.""" if method not in {"contains", "representative"}: - msg = "Invalid value for `method`. Choose from 'contains', " - raise ValueError( - msg, - "'representative'.", - ) + msg = "Invalid value for `method`. Choose from 'contains', 'representative'." 
+ raise ValueError(msg) masks = {} for id_, geometry in geometries.items(): diff --git a/esmvalcore/preprocessor/_cycles.py b/esmvalcore/preprocessor/_cycles.py index 400530040a..e4b7aed3dc 100644 --- a/esmvalcore/preprocessor/_cycles.py +++ b/esmvalcore/preprocessor/_cycles.py @@ -70,9 +70,7 @@ def amplitude(cube, coords): f"{cube.summary(shorten=True)} and cannot be added via " f"iris.coord_categorisation" ) - raise iris.exceptions.CoordinateNotFoundError( - msg, - ) + raise iris.exceptions.CoordinateNotFoundError(msg) # Calculate amplitude with ignore_iris_vague_metadata_warnings(): diff --git a/esmvalcore/preprocessor/_derive/amoc.py b/esmvalcore/preprocessor/_derive/amoc.py index 3607aa1d62..67b179f0dd 100644 --- a/esmvalcore/preprocessor/_derive/amoc.py +++ b/esmvalcore/preprocessor/_derive/amoc.py @@ -72,9 +72,7 @@ def calculate(cubes): f"Amoc calculation: {cube_orig} doesn't contain" f" atlantic_arctic_ocean." ) - raise ValueError( - msg, - ) + raise ValueError(msg) # 2: Remove the shallowest 500m to avoid wind driven mixed layer. 
depth_constraint = iris.Constraint(depth=lambda d: d >= 500.0) diff --git a/esmvalcore/preprocessor/_derive/ctotal.py b/esmvalcore/preprocessor/_derive/ctotal.py index b881f1e8ee..307ce01336 100644 --- a/esmvalcore/preprocessor/_derive/ctotal.py +++ b/esmvalcore/preprocessor/_derive/ctotal.py @@ -43,9 +43,7 @@ def calculate(cubes): f"standard name CMIP5: soil_carbon_content " f"or CMIP6: soil_mass_content_of_carbon" ) - raise ValueError( - msg, - ) from exc + raise ValueError(msg) from exc c_veg_cube = cubes.extract_cube( Constraint(name="vegetation_carbon_content"), ) diff --git a/esmvalcore/preprocessor/_mask.py b/esmvalcore/preprocessor/_mask.py index 790a58b3bd..916a724f5e 100644 --- a/esmvalcore/preprocessor/_mask.py +++ b/esmvalcore/preprocessor/_mask.py @@ -153,9 +153,7 @@ def mask_landsea(cube: Cube, mask_out: Literal["land", "sea"]) -> Cube: "Use of shapefiles with irregular grids not yet implemented, " "land-sea mask not applied." ) - raise ValueError( - msg, - ) + raise ValueError(msg) return cube @@ -528,9 +526,7 @@ def _get_shape(cubes): shapes = {cube.shape for cube in cubes} if len(shapes) > 1: msg = f"Expected cubes with identical shapes, got shapes {shapes}" - raise ValueError( - msg, - ) + raise ValueError(msg) return next(iter(shapes)) @@ -616,9 +612,7 @@ def mask_multimodel(products): f"iris.cube.Cube or esmvalcore.preprocessor.PreprocessorFile, " f"got {product_types}" ) - raise TypeError( - msg, - ) + raise TypeError(msg) def mask_fillvalues( @@ -685,9 +679,7 @@ def mask_fillvalues( valid = ~mask.all(axis=(-2, -1), keepdims=True) else: msg = f"Unable to handle {mask.ndim} dimensional data" - raise NotImplementedError( - msg, - ) + raise NotImplementedError(msg) combined_mask = array_module.where( valid, combined_mask | mask, @@ -730,9 +722,7 @@ def _get_fillvalues_mask( f"Fraction of missing values {threshold_fraction} should be " f"between 0 and 1.0" ) - raise ValueError( - msg, - ) + raise ValueError(msg) nr_time_points = 
len(cube.coord("time").points) if time_window > nr_time_points: msg = "Time window (in time units) larger than total time span. Stop." diff --git a/esmvalcore/preprocessor/_multimodel.py b/esmvalcore/preprocessor/_multimodel.py index f15193db4b..d8ce839ff7 100644 --- a/esmvalcore/preprocessor/_multimodel.py +++ b/esmvalcore/preprocessor/_multimodel.py @@ -453,9 +453,7 @@ def _combine(cubes): f"Multi-model statistics failed to merge input cubes into a " f"single array:\n{cubes}\n{msg}" ) - raise ValueError( - msg, - ) from exc + raise ValueError(msg) from exc return merged_cube @@ -521,9 +519,7 @@ def _compute_eager( f"This can happen e.g. if the calculation results in inconsistent " f"dtypes" ) - raise ValueError( - msg, - ) from excinfo + raise ValueError(msg) from excinfo result_cube.data = np.ma.array(result_cube.data) @@ -575,9 +571,7 @@ def _multicube_statistics( """ if not cubes: msg = "Cannot perform multicube statistics for an empty list of cubes" - raise ValueError( - msg, - ) + raise ValueError(msg) # Avoid modifying inputs cubes = [cube.copy() for cube in cubes] @@ -608,9 +602,7 @@ def _multicube_statistics( "array: some cubes have a 'time' dimension, some do not have a " "'time' dimension." 
) - raise ValueError( - msg, - ) + raise ValueError(msg) # Calculate statistics statistics_cubes = {} @@ -681,9 +673,7 @@ def _get_operator_and_kwargs(statistic: str | dict) -> tuple[str, dict]: f"`statistic` given as dictionary, but missing required key " f"`operator`, got {statistic}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) operator = statistic.pop("operator") kwargs = statistic else: @@ -845,9 +835,7 @@ def multi_model_statistics( f"iris.cube.Cube or esmvalcore.preprocessor.PreprocessorFile, " f"got {products}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) def ensemble_statistics( diff --git a/esmvalcore/preprocessor/_regrid_unstructured.py b/esmvalcore/preprocessor/_regrid_unstructured.py index d803938186..73fedc741f 100644 --- a/esmvalcore/preprocessor/_regrid_unstructured.py +++ b/esmvalcore/preprocessor/_regrid_unstructured.py @@ -93,17 +93,13 @@ def __init__(self, src_cube: Cube, tgt_cube: Cube) -> None: f"Source cube {src_cube.summary(shorten=True)} does not have " f"unstructured grid" ) - raise ValueError( - msg, - ) + raise ValueError(msg) if not has_regular_grid(tgt_cube): msg = ( f"Target cube {tgt_cube.summary(shorten=True)} does not have " f"regular grid" ) - raise ValueError( - msg, - ) + raise ValueError(msg) src_lat = src_cube.coord("latitude").copy() src_lon = src_cube.coord("longitude").copy() tgt_lat = tgt_cube.coord("latitude").copy() @@ -211,18 +207,14 @@ def __call__(self, cube: Cube) -> Cube: f"Cube {cube.summary(shorten=True)} does not have " f"unstructured grid" ) - raise ValueError( - msg, - ) + raise ValueError(msg) coords = [cube.coord("latitude"), cube.coord("longitude")] if coords != self.src_coords: msg = ( f"The given cube {cube.summary(shorten=True)} is not defined " f"on the same source grid as this regridder" ) - raise ValueError( - msg, - ) + raise ValueError(msg) # Get coordinates of regridded cube diff --git a/esmvalcore/preprocessor/_shared.py b/esmvalcore/preprocessor/_shared.py index 
ecf58028e2..f5709b3653 100644 --- a/esmvalcore/preprocessor/_shared.py +++ b/esmvalcore/preprocessor/_shared.py @@ -173,9 +173,7 @@ def update_weights_kwargs( kwargs = dict(kwargs) if not aggregator_accept_weights(aggregator) and "weights" in kwargs: msg = f"Aggregator '{operator}' does not support 'weights' option" - raise ValueError( - msg, - ) + raise ValueError(msg) if aggregator_accept_weights(aggregator) and kwargs.get("weights", True): kwargs["weights"] = weights if cube is not None and callback is not None: @@ -232,9 +230,7 @@ def get_normalized_cube( f"Expected 'subtract' or 'divide' for `normalize`, got " f"'{normalize}'" ) - raise ValueError( - msg, - ) + raise ValueError(msg) # Keep old metadata except for units new_units = normalized_cube.units @@ -274,9 +270,7 @@ def preserve_float_dtype(func: Callable) -> Callable: f"Cannot preserve float dtype during function '{func.__name__}', " f"function takes no arguments" ) - raise TypeError( - msg, - ) + raise TypeError(msg) @wraps(func) def wrapper(*args: Any, **kwargs: Any) -> DataType: @@ -298,9 +292,7 @@ def wrapper(*args: Any, **kwargs: Any) -> DataType: f"type {type(result)} do not have the necessary attribute " f"'dtype'" ) - raise TypeError( - msg, - ) + raise TypeError(msg) return result @@ -381,9 +373,7 @@ def get_weights( f"`cell_area` can be given to the cube as supplementary " f"variable)" ) - raise CoordinateNotFoundError( - msg, - ) + raise CoordinateNotFoundError(msg) try_adding_calculated_cell_area(cube) area_weights = cube.cell_measure("cell_area").core_data() if cube.has_lazy_data(): @@ -438,18 +428,14 @@ def get_coord_weights( f"Cannot calculate weights for coordinate '{coord.name()}' " f"without bounds" ) - raise ValueError( - msg, - ) + raise ValueError(msg) if coord.core_bounds().shape[-1] != 2: msg = ( f"Cannot calculate weights for coordinate '{coord.name()}' " f"with {coord.core_bounds().shape[-1]} bounds per point, expected " f"2 bounds per point" ) - raise ValueError( - msg, - ) + 
raise ValueError(msg) # Calculate weights of same shape as coordinate and make sure to use # identical chunks as parent cube for non-scalar lazy data @@ -573,9 +559,7 @@ def get_all_coords( f"{cube.summary(shorten=True)} must not have unnamed " f"dimensions" ) - raise ValueError( - msg, - ) + raise ValueError(msg) return coords diff --git a/esmvalcore/preprocessor/_supplementary_vars.py b/esmvalcore/preprocessor/_supplementary_vars.py index f7a1491d09..f1ed2915ff 100644 --- a/esmvalcore/preprocessor/_supplementary_vars.py +++ b/esmvalcore/preprocessor/_supplementary_vars.py @@ -82,9 +82,7 @@ def add_cell_measure( """ if measure not in ["area", "volume"]: msg = f"measure name must be 'area' or 'volume', got {measure} instead" - raise ValueError( - msg, - ) + raise ValueError(msg) coord_dims = tuple( range(cube.ndim - len(cell_measure_cube.shape), cube.ndim), ) diff --git a/esmvalcore/preprocessor/_units.py b/esmvalcore/preprocessor/_units.py index 413a289066..52e80ccff6 100644 --- a/esmvalcore/preprocessor/_units.py +++ b/esmvalcore/preprocessor/_units.py @@ -126,15 +126,11 @@ def accumulate_coordinate( f"Requested coordinate {coordinate} not found in cube " f"{cube.summary(shorten=True)}" ) - raise ValueError( - msg, - ) from err + raise ValueError(msg) from err if coord.ndim > 1: msg = f"Multidimensional coordinate {coord} not supported." - raise NotImplementedError( - msg, - ) + raise NotImplementedError(msg) array_module = da if coord.has_lazy_bounds() else np factor = AuxCoord( diff --git a/esmvalcore/preprocessor/_volume.py b/esmvalcore/preprocessor/_volume.py index aefd15c8c1..e7d92f3a32 100644 --- a/esmvalcore/preprocessor/_volume.py +++ b/esmvalcore/preprocessor/_volume.py @@ -110,9 +110,7 @@ def extract_volume( 'Depth extraction bounds can be set to "open", "closed", ' f'"left_closed", or "right_closed". Got "{interval_bounds}".' 
) - raise ValueError( - msg, - ) + raise ValueError(msg) z_constraint = iris.Constraint(coord_values=coord_values) @@ -162,18 +160,14 @@ def calculate_volume(cube: Cube) -> np.ndarray | da.Array: "Bounds should be 2 in the last dimension to compute the " "thickness." ) - raise ValueError( - msg, - ) + raise ValueError(msg) # Convert units to get the thickness in meters try: depth.convert_units("m") except ValueError as err: msg = f"Cannot compute volume using the Z-axis. {err}" - raise ValueError( - msg, - ) from err + raise ValueError(msg) from err # Calculate Z-direction thickness thickness = depth.core_bounds()[..., 1] - depth.core_bounds()[..., 0] @@ -314,9 +308,7 @@ def volume_statistics( "This may indicate Z axis depending on other dimension than " "space that could provoke invalid aggregation..." ) - raise ValueError( - msg, - ) + raise ValueError(msg) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) agg_kwargs = update_weights_kwargs( @@ -391,9 +383,7 @@ def axis_statistics( coord = cube.coord(axis=axis) except iris.exceptions.CoordinateNotFoundError as err: msg = f"Axis {axis} not found in cube {cube.summary(shorten=True)}" - raise ValueError( - msg, - ) from err + raise ValueError(msg) from err # Multidimensional coordinates are currently not supported coord_dims = cube.coord_dims(coord) @@ -401,9 +391,7 @@ def axis_statistics( msg = ( "axis_statistics not implemented for multidimensional coordinates." 
) - raise NotImplementedError( - msg, - ) + raise NotImplementedError(msg) # For weighted operations, create a dummy weights coordinate using the # bounds of the original coordinate (this handles units properly, e.g., for @@ -621,9 +609,7 @@ def extract_trajectory( """ if len(latitudes) != len(longitudes): msg = "Longitude & Latitude coordinates have different lengths" - raise ValueError( - msg, - ) + raise ValueError(msg) if len(latitudes) == len(longitudes) == 2: minlat, maxlat = np.min(latitudes), np.max(latitudes) diff --git a/esmvalcore/preprocessor/_weighting.py b/esmvalcore/preprocessor/_weighting.py index 9089e1d62e..dc42727616 100644 --- a/esmvalcore/preprocessor/_weighting.py +++ b/esmvalcore/preprocessor/_weighting.py @@ -73,18 +73,14 @@ def weighting_landsea_fraction(cube, area_type): """ if area_type not in ("land", "sea"): msg = f"Expected 'land' or 'sea' for area_type, got '{area_type}'" - raise TypeError( - msg, - ) + raise TypeError(msg) (land_fraction, errors) = _get_land_fraction(cube) if land_fraction is None: msg = ( f"Weighting of '{cube.var_name}' with '{area_type}' fraction " f"failed because of the following errors: {' '.join(errors)}" ) - raise ValueError( - msg, - ) + raise ValueError(msg) core_data = cube.core_data() if area_type == "land": cube.data = core_data * land_fraction diff --git a/tests/integration/preprocessor/_mask/test_mask.py b/tests/integration/preprocessor/_mask/test_mask.py index 88ef081ac3..c255878941 100644 --- a/tests/integration/preprocessor/_mask/test_mask.py +++ b/tests/integration/preprocessor/_mask/test_mask.py @@ -3,6 +3,7 @@ Integration tests for the :func:`esmvalcore.preprocessor._mask` module. 
""" +import re from pathlib import Path import dask.array as da @@ -10,7 +11,8 @@ import iris.fileformats import numpy as np import pytest -from iris.coords import AuxCoord +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube, CubeList from esmvalcore.preprocessor import ( PreprocessorFile, @@ -19,7 +21,11 @@ mask_landsea, mask_landseaice, ) +from esmvalcore.preprocessor._mask import _get_fillvalues_mask from tests import assert_array_equal +from tests.unit.preprocessor._mask.test_mask_multimodel import ( + PreprocessorFile as MockedPreprocessorFile, +) class Test: @@ -461,3 +467,49 @@ def test_mask_fillvalues_min_value_none(self, mocker, lazy): else: msg = f"Invalid filename: {product.filename}" raise AssertionError(msg) + + def test_mask_fillvalues_unsupported_ndim_fail(self) -> None: + """Test ``mask_fillvalues`` with unsupported data dimensions.""" + cube = iris.cube.Cube( + self.mock_data[:, 0, 0], + dim_coords_and_dims=[(self.times, 0)], + ) + products = [MockedPreprocessorFile(CubeList([cube]), "A")] + msg = r"Unable to handle 0 dimensional data" + with pytest.raises(NotImplementedError, match=re.escape(msg)): + mask_fillvalues( + products, + threshold_fraction=0.5, + min_value=None, + time_window=1, + ) + + @pytest.mark.parametrize( + "threshold_fraction", + [-1000.0, -0.1, 1.1, 1000.0], + ) + def test_get_fillvalues_mask_invalid_threshold_fraction_fail( + self, + threshold_fraction: float, + ) -> None: + msg = r"should be between 0 and 1.0" + with pytest.raises(ValueError, match=re.escape(msg)): + _get_fillvalues_mask( + Cube(0.0), + threshold_fraction=threshold_fraction, + min_value=0.0, + time_window=1, + ) + + def test_get_fillvalues_mask_invalid_time_window_fail(self) -> None: + time_coord = DimCoord([0.0], standard_name="time") + cube = Cube([0.0], dim_coords_and_dims=[(time_coord, 0)]) + + msg = r"Time window (in time units) larger than total time span. Stop." 
+ with pytest.raises(ValueError, match=re.escape(msg)): + _get_fillvalues_mask( + cube, + threshold_fraction=0.5, + min_value=0.0, + time_window=2, + ) diff --git a/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py b/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py index f1f5c93ac7..be03ddeb8d 100644 --- a/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py +++ b/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py @@ -4,6 +4,8 @@ :func:`esmvalcore.preprocessor._supplementary_vars` module. """ +import re + import dask.array as da import iris import iris.fileformats @@ -356,3 +358,12 @@ def test_find_matching_coord_dims_match(self): self.lats, self.cube, ) == (1,) + + def test_add_cell_measure_invalid_measure_fail(self) -> None: + msg = r"measure name must be 'area' or 'volume'" + with pytest.raises(ValueError, match=re.escape(msg)): + add_cell_measure( + iris.cube.Cube(0.0), + self.fx_area, + measure="invalid", # type: ignore[arg-type] + ) diff --git a/tests/integration/test_task.py b/tests/integration/test_task.py index 1f8e3db1dd..4329ec4310 100644 --- a/tests/integration/test_task.py +++ b/tests/integration/test_task.py @@ -1,14 +1,20 @@ +from __future__ import annotations + import multiprocessing import os +import re import shutil from contextlib import contextmanager from functools import partial from multiprocessing.pool import ThreadPool +from typing import TYPE_CHECKING +from unittest.mock import sentinel import pytest import yaml import esmvalcore +import esmvalcore._task from esmvalcore._task import ( BaseTask, DiagnosticError, @@ -19,6 +25,11 @@ ) from esmvalcore.config._diagnostics import DIAGNOSTICS +if TYPE_CHECKING: + from pathlib import Path + + from pytest_mock import MockerFixture + class MockBaseTask(BaseTask): def _run(self, input_files): @@ -374,3 +385,24 @@ def _run(self, input_filesi=None): with 
pytest.raises(DiagnosticError) as err_mssg: task.run() assert diag_text[1] in str(err_mssg.value) + + +def test_initialize_provenance_already_initialized_fail() -> None: + task = BaseTask() # type: ignore[abstract] + task.activity = sentinel.activity + + msg = r"Provenance of BaseTask('') already initialized" + with pytest.raises(ValueError, match=re.escape(msg)): + task.initialize_provenance(sentinel.recipe_entity) + + +def test_diagnostic_task_script_not_x_fail( + tmp_path: Path, + mocker: MockerFixture, +) -> None: + mock_os = mocker.patch.object(esmvalcore._task, "os", autospec=True) + mock_os.access.return_value = False + diag_script = tmp_path / "diag_cow.c" + msg = r"Cannot execute script" + with pytest.raises(DiagnosticError, match=re.escape(msg)): + _get_single_diagnostic_task(tmp_path, diag_script) diff --git a/tests/unit/experimental/test_recipe_info.py b/tests/unit/experimental/test_recipe_info.py index b085ace4b4..d61464fbad 100644 --- a/tests/unit/experimental/test_recipe_info.py +++ b/tests/unit/experimental/test_recipe_info.py @@ -1,7 +1,14 @@ +from __future__ import annotations + +import re import textwrap from pathlib import Path +from typing import TYPE_CHECKING + +import pytest import esmvalcore +import esmvalcore.experimental.recipe_metadata from esmvalcore.config._diagnostics import TAGS, Diagnostics from esmvalcore.experimental.recipe_info import ( Contributor, @@ -10,6 +17,9 @@ Reference, ) +if TYPE_CHECKING: + from pytest_mock import MockerFixture + DIAGNOSTICS = Diagnostics(Path(__file__).parent) @@ -95,3 +105,53 @@ def test_recipe_info_str(): ### Maintainers """).lstrip() assert str(recipe) == text + + +def test_reference_multiple_entries_fail(tmp_path: Path) -> None: + bib_file = tmp_path / "bib.bib" + bib_file.write_text( + textwrap.dedent( + """ + @article{a, + title = {a}, + author = {a}, + year = 2020, + } + @article{b, + title = {b}, + author = {b}, + year = 2020, + } + """, + ), + ) + msg = r"Reference cannot handle bibtex 
files with more than 1 entry" + with pytest.raises(NotImplementedError, match=re.escape(msg)): + Reference(str(bib_file)) + + +def test_render_fail( + monkeypatch: pytest.MonkeyPatch, + mocker: MockerFixture, +) -> None: + mock_pybtex = mocker.patch.object( + esmvalcore.experimental.recipe_metadata, + "pybtex", + autospec=True, + ) + mock_pybtex.plugin.find_plugin.return_value.return_value.format_entry.side_effect = ValueError( + "err", + ) + monkeypatch.setattr( + esmvalcore.experimental.recipe_metadata, + "DIAGNOSTICS", + DIAGNOSTICS, + ) + reference = Reference.from_tag("doe2021") + + msg = r"Could not render 'doe2021': err" + with pytest.raises( + esmvalcore.experimental.recipe_metadata.RenderError, + match=re.escape(msg), + ): + reference.render() diff --git a/tests/unit/preprocessor/_area/test_area.py b/tests/unit/preprocessor/_area/test_area.py index 8299d3e461..080d18c75a 100644 --- a/tests/unit/preprocessor/_area/test_area.py +++ b/tests/unit/preprocessor/_area/test_area.py @@ -1,5 +1,6 @@ """Unit tests for the :func:`esmvalcore.preprocessor._area` module.""" +import re import unittest from pathlib import Path @@ -303,8 +304,17 @@ def test_extract_named_region(self): # test for expected failures: with self.assertRaises(ValueError): extract_named_regions(region_cube, "reg_A") + with self.assertRaises(ValueError): extract_named_regions(region_cube, ["region1", "reg_A"]) + def test_extract_named_region_invalid_regions_type_fail(self) -> None: + msg = r'Regions "123" is not an acceptable format.' + with pytest.raises(TypeError, match=re.escape(msg)): + extract_named_regions(Cube(0), 123) # type: ignore[arg-type] + msg = r'Regions ".*" is not an acceptable format.' 
+ with pytest.raises(TypeError, match=msg): + extract_named_regions(Cube(0), {"region1": "region2"}) # type: ignore[arg-type] + def create_irregular_grid_cube(data, lons, lats): """Create test cube on irregular grid.""" diff --git a/tests/unit/preprocessor/_multimodel/test_multimodel.py b/tests/unit/preprocessor/_multimodel/test_multimodel.py index 9d7614d100..223ff454c4 100644 --- a/tests/unit/preprocessor/_multimodel/test_multimodel.py +++ b/tests/unit/preprocessor/_multimodel/test_multimodel.py @@ -1,5 +1,6 @@ """Unit test for :func:`esmvalcore.preprocessor._multimodel`.""" +import re from datetime import datetime from unittest import mock @@ -1727,3 +1728,37 @@ def test_differing_multi_model_settings(): msg = r"Unable to combine differing multi-dataset settings for a and b" with pytest.raises(ValueError, match=msg): _check_multi_model_settings(products) + + +def test_multi_model_statistics_invalid_input_type_fail() -> None: + msg = r"Input type for multi_model_statistics not understood." + with pytest.raises(ValueError, match=re.escape(msg)): + mm.multi_model_statistics([0, 0], "full", []) + + +def test_compute_eager_concatenate_fail() -> None: + """Test that ``_compute_eager`` raises on concatenation failure.""" + # Use small cubes (no slicing) but mock concatenate_cube to fail + cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(3)) + + # Create a mock result that will fail on concatenate + mock_result = mock.Mock() + mock_result.coords = mock.Mock(return_value=[]) + + # Test with concatenate_cube raising an exception + with mock.patch.object( + mm, + "_compute", + return_value=mock_result, + ): + with mock.patch.object( + iris.cube.CubeList, + "concatenate_cube", + side_effect=iris.exceptions.ConcatenateError("test error"), + ): + msg = ( + r"Multi-model statistics failed to concatenate results into a " + r"single array. 
This happened for operator" + ) + with pytest.raises(ValueError, match=re.escape(msg)): + mm._compute_eager(cubes, operator="mean") diff --git a/tests/unit/preprocessor/_volume/test_volume.py b/tests/unit/preprocessor/_volume/test_volume.py index 90a291209e..eda8682d8d 100644 --- a/tests/unit/preprocessor/_volume/test_volume.py +++ b/tests/unit/preprocessor/_volume/test_volume.py @@ -819,6 +819,15 @@ def test_extract_trajectory(self): expected = np.ones((3, 2)) self.assert_array_equal(result.data, expected) + def test_extract_trajectory_different_lengths(self) -> None: + """Test that ValueError is raised when lat/lon have different lengths.""" + with self.assertRaises(ValueError) as err: + extract_trajectory(self.grid_3d, [1.5, 2.5], [2.0], 2) + self.assertEqual( + "Longitude & Latitude coordinates have different lengths", + str(err.exception), + ) + def test__get_first_unmasked_data(self): """Test to get first unmasked value of an array along an axis.""" (z_axis,) = self.grid_4d_2.coord_dims( diff --git a/tests/unit/preprocessor/_weighting/test_weighting_landsea_fraction.py b/tests/unit/preprocessor/_weighting/test_weighting_landsea_fraction.py index 269880c601..a2ededcedf 100644 --- a/tests/unit/preprocessor/_weighting/test_weighting_landsea_fraction.py +++ b/tests/unit/preprocessor/_weighting/test_weighting_landsea_fraction.py @@ -1,5 +1,7 @@ """Unit tests for :mod:`esmvalcore.preprocessor._weighting`.""" +import re + import iris import iris.fileformats import numpy as np @@ -118,6 +120,12 @@ def test_get_land_fraction(cube, out, err): ] +def test_weighting_landsea_fraction_invalid_area_type_fail() -> None: + msg = r"Expected 'land' or 'sea' for area_type" + with pytest.raises(TypeError, match=re.escape(msg)): + weighting.weighting_landsea_fraction(CUBE_3, "invalid") # type: ignore[arg-type] + + @pytest.mark.parametrize( ("cube", "area_type", "out"), WEIGHTING_LANDSEA_FRACTION, diff --git a/tests/unit/task/test_diagnostic_task.py 
b/tests/unit/task/test_diagnostic_task.py index cb6047bc11..27cc661d48 100644 --- a/tests/unit/task/test_diagnostic_task.py +++ b/tests/unit/task/test_diagnostic_task.py @@ -1,7 +1,11 @@ +from __future__ import annotations + import copy import logging +import re import stat from pathlib import Path +from typing import TYPE_CHECKING import pytest import yaml @@ -10,6 +14,9 @@ from esmvalcore._task import DiagnosticError, write_ncl_settings from esmvalcore.config._diagnostics import TagsManager +if TYPE_CHECKING: + from pytest_mock import MockerFixture + def test_write_ncl_settings(tmp_path): """Test minimally write_ncl_settings().""" @@ -337,3 +344,55 @@ def test_collect_provenance_ancestor_hint(mocker, caplog, diagnostic_task): ["Valid ancestor files", "xyz.nc"], ], ) + + +def test_run_fails_with_nonzero_returncode( + mocker: MockerFixture, + tmp_path: Path, +) -> None: + """Test that DiagnosticError is raised when script fails.""" + mocker.patch.object( + esmvalcore._task.DiagnosticTask, + "_initialize_cmd", + autospec=True, + ) + mocker.patch.object( + esmvalcore._task.DiagnosticTask, + "_initialize_env", + autospec=True, + ) + + settings = { + "run_dir": str(tmp_path / "run_dir"), + "plot_dir": str(tmp_path / "plot_dir"), + "work_dir": str(tmp_path / "work_dir"), + "profile_diagnostic": False, + } + + task = esmvalcore._task.DiagnosticTask( + "test.py", + settings, + output_dir=str(tmp_path), + name="test-task", + ) + + # Mock subprocess.Popen to return a process with non-zero return code + mock_process = mocker.Mock() + mock_process.pid = 12345 + mock_process.poll.return_value = ( + 1 # Non-zero return code indicates failure + ) + mock_process.stdout.read.return_value = b"Error occurred\n" + + mocker.patch.object( + esmvalcore._task.subprocess, + "Popen", + return_value=mock_process, + ) + + # Mock resource_usage_logger to avoid threading issues in tests + mocker.patch.object(esmvalcore._task, "resource_usage_logger") + + msg = r"Diagnostic script test.py 
failed with return code 1" + with pytest.raises(DiagnosticError, match=re.escape(msg)): + task._run(input_files=[]) diff --git a/tests/unit/test_version.py b/tests/unit/test_version.py index 3dca6d14ba..cb4d42cc9b 100644 --- a/tests/unit/test_version.py +++ b/tests/unit/test_version.py @@ -1,9 +1,30 @@ """Test that esmvalcore.__version__ returns a version number.""" +from __future__ import annotations + +import importlib import re +from importlib.metadata import PackageNotFoundError +from typing import TYPE_CHECKING + +import pytest import esmvalcore +import esmvalcore._version + +if TYPE_CHECKING: + from pytest_mock import MockerFixture def test_version(): assert re.match(r"^\d+\.\d+\.\d+\S*$", esmvalcore.__version__) + + +def test_version_package_not_found_fail(mocker: MockerFixture) -> None: + mocker.patch( + "importlib.metadata.version", + side_effect=PackageNotFoundError("ESMValCore"), + ) + msg = r"ESMValCore package not found" + with pytest.raises(PackageNotFoundError, match=re.escape(msg)): + importlib.reload(esmvalcore._version)