diff --git a/linopy/__init__.py b/linopy/__init__.py index b1dc33b9..d9f4b64e 100644 --- a/linopy/__init__.py +++ b/linopy/__init__.py @@ -14,8 +14,13 @@ import linopy.monkey_patch_xarray # noqa: F401 from linopy.common import align from linopy.config import options -from linopy.constants import EQUAL, GREATER_EQUAL, LESS_EQUAL -from linopy.constraints import Constraint, Constraints +from linopy.constants import EQUAL, GREATER_EQUAL, LESS_EQUAL, PerformanceWarning +from linopy.constraints import ( + Constraint, + ConstraintBase, + Constraints, + CSRConstraint, +) from linopy.expressions import LinearExpression, QuadraticExpression, merge from linopy.io import read_netcdf from linopy.model import Model, Variable, Variables, available_solvers @@ -29,9 +34,12 @@ pass __all__ = ( - "Constraint", + "CSRConstraint", + "ConstraintBase", "Constraints", + "Constraint", "EQUAL", + "PerformanceWarning", "GREATER_EQUAL", "LESS_EQUAL", "LinearExpression", diff --git a/linopy/common.py b/linopy/common.py index 09f67355..a90a5fab 100644 --- a/linopy/common.py +++ b/linopy/common.py @@ -10,7 +10,7 @@ import operator import os from collections.abc import Callable, Generator, Hashable, Iterable, Sequence -from functools import partial, reduce, wraps +from functools import cached_property, partial, reduce, wraps from pathlib import Path from typing import TYPE_CHECKING, Any, Generic, TypeVar, overload from warnings import warn @@ -19,6 +19,7 @@ import pandas as pd import polars as pl from numpy import arange, signedinteger +from polars.datatypes import DataTypeClass from xarray import DataArray, Dataset, apply_ufunc, broadcast from xarray import align as xr_align from xarray.core import dtypes, indexing @@ -40,7 +41,7 @@ ) if TYPE_CHECKING: - from linopy.constraints import Constraint + from linopy.constraints import ConstraintBase from linopy.expressions import LinearExpression, QuadraticExpression from linopy.variables import Variable @@ -327,7 +328,7 @@ def check_has_nulls(df: 
pd.DataFrame, name: str) -> None: raise ValueError(f"Fields {name} contains nan's in field(s) {fields}") -def infer_schema_polars(ds: Dataset) -> dict[Hashable, pl.DataType]: +def infer_schema_polars(ds: Dataset) -> dict[str, DataTypeClass]: """ Infer the polars data schema from a xarray dataset. @@ -339,21 +340,22 @@ def infer_schema_polars(ds: Dataset) -> dict[Hashable, pl.DataType]: ------- dict: A dictionary mapping column names to their corresponding Polars data types. """ - schema = {} + schema: dict[str, DataTypeClass] = {} np_major_version = int(np.__version__.split(".")[0]) use_int32 = os.name == "nt" and np_major_version < 2 for name, array in ds.items(): + name = str(name) if np.issubdtype(array.dtype, np.integer): schema[name] = pl.Int32 if use_int32 else pl.Int64 elif np.issubdtype(array.dtype, np.floating): - schema[name] = pl.Float64 # type: ignore + schema[name] = pl.Float64 elif np.issubdtype(array.dtype, np.bool_): - schema[name] = pl.Boolean # type: ignore + schema[name] = pl.Boolean elif np.issubdtype(array.dtype, np.object_): - schema[name] = pl.Object # type: ignore + schema[name] = pl.Object else: - schema[name] = pl.Utf8 # type: ignore - return schema # type: ignore + schema[name] = pl.Utf8 + return schema def to_polars(ds: Dataset, **kwargs: Any) -> pl.DataFrame: @@ -429,7 +431,7 @@ def filter_nulls_polars(df: pl.DataFrame) -> pl.DataFrame: if "labels" in df.columns: cond.append(pl.col("labels").ne(-1)) - cond = reduce(operator.and_, cond) # type: ignore + cond = reduce(operator.and_, cond) # type: ignore[arg-type] return df.filter(cond) @@ -554,7 +556,7 @@ def fill_missing_coords( return ds -T = TypeVar("T", Dataset, "Variable", "LinearExpression", "Constraint") +T = TypeVar("T", Dataset, "Variable", "LinearExpression", "ConstraintBase") @overload @@ -583,10 +585,10 @@ def iterate_slices( @overload def iterate_slices( - ds: Constraint, + ds: ConstraintBase, slice_size: int | None = 10_000, slice_dims: list | None = None, -) -> 
Generator[Constraint, None, None]: ... +) -> Generator[ConstraintBase, None, None]: ... def iterate_slices( @@ -655,7 +657,7 @@ def iterate_slices( start = i * chunk_size end = min(start + chunk_size, size_of_leading_dim) slice_dict = {leading_dim: slice(start, end)} - yield ds.isel(slice_dict) + yield ds.isel(slice_dict) # type: ignore[attr-defined] def _remap(array: np.ndarray, mapping: np.ndarray) -> np.ndarray: @@ -939,6 +941,57 @@ def find_single(value: int) -> tuple[str, dict] | tuple[None, None]: raise ValueError("Array's with more than two dimensions is not supported") +class VariableLabelIndex: + """ + Index for O(1) mapping between variable labels and dense positions. + + Both arrays are computed lazily and cached: + - ``vlabels``: active variable labels in encounter order, shape (n_active_vars,) + - ``label_to_pos``: derived from vlabels; size _xCounter, maps label -> position (-1 if masked) + + Invalidated by clearing the instance ``__dict__`` when variables are added or removed. + """ + + def __init__(self, variables: Any) -> None: + self._variables = variables + + @cached_property + def vlabels(self) -> np.ndarray: + """Active variable labels in encounter order, shape (n_active_vars,).""" + label_lists = [] + for _, var in self._variables.items(): + labels = var.labels.values.ravel() + mask = labels != -1 + label_lists.append(labels[mask]) + return ( + np.concatenate(label_lists) if label_lists else np.array([], dtype=np.intp) + ) + + @cached_property + def label_to_pos(self) -> np.ndarray: + """ + Mapping from variable label to dense position, shape (_xCounter,). + + Position i in the active variable array corresponds to label vlabels[i]. + Masked or unused labels map to -1. 
+ """ + vlabels = self.vlabels + n = self._variables.model._xCounter + label_to_pos = np.full(n, -1, dtype=np.intp) + label_to_pos[vlabels] = np.arange(len(vlabels), dtype=np.intp) + return label_to_pos + + @property + def n_active_vars(self) -> int: + """Number of active (non-masked) variables.""" + return len(self.vlabels) + + def invalidate(self) -> None: + """Clear cached arrays so they are recomputed on next access.""" + self.__dict__.pop("vlabels", None) + self.__dict__.pop("label_to_pos", None) + + def get_label_position( obj: Any, values: int | np.ndarray, @@ -1306,7 +1359,7 @@ def align( "Variable", "LinearExpression", "QuadraticExpression", - "Constraint", + "ConstraintBase", ) @@ -1324,7 +1377,7 @@ def __getitem__( # expand the indexer so we can handle Ellipsis labels = indexing.expanded_indexer(key, self.object.ndim) key = dict(zip(self.object.dims, labels)) - return self.object.sel(key) + return self.object.sel(key) # type: ignore[attr-defined] class EmptyDeprecationWrapper: @@ -1358,6 +1411,60 @@ def __call__(self) -> bool: return self.value +def coords_to_dataset_vars(coords: list[pd.Index]) -> dict[str, DataArray]: + """ + Serialize a list of pd.Index (including MultiIndex) to a DataArray dict. + + Suitable for embedding coordinate metadata as plain data variables in a + Dataset that has its own unrelated dimensions (e.g. CSR netcdf format). + Reconstruct with :func:`coords_from_dataset`. 
+ """ + data_vars: dict[str, DataArray] = {} + for c in coords: + if isinstance(c, pd.MultiIndex): + for level_name, level_values in zip(c.names, c.levels): + data_vars[f"_coord_{c.name}_level_{level_name}"] = DataArray( + np.array(level_values), + dims=[f"_coorddim_{c.name}_level_{level_name}"], + ) + data_vars[f"_coord_{c.name}_codes"] = DataArray( + np.array(c.codes).T, + dims=[f"_coorddim_{c.name}", f"_coorddim_{c.name}_nlevels"], + ) + else: + data_vars[f"_coord_{c.name}"] = DataArray( + np.array(c), dims=[f"_coorddim_{c.name}"] + ) + return data_vars + + +def coords_from_dataset(ds: Dataset, coord_dims: list[str]) -> list[pd.Index]: + """ + Deserialize a list of pd.Index (including MultiIndex) from a Dataset. + + Reconstructs coordinates previously serialized by :func:`coords_to_dataset_vars`. + """ + coords = [] + for d in coord_dims: + if f"_coord_{d}_codes" in ds: + codes_2d = ds[f"_coord_{d}_codes"].values.T + level_names = [ + str(k)[len(f"_coord_{d}_level_") :] + for k in ds + if str(k).startswith(f"_coord_{d}_level_") + ] + arrays = [ + ds[f"_coord_{d}_level_{ln}"].values[codes_2d[i]] + for i, ln in enumerate(level_names) + ] + mi = pd.MultiIndex.from_arrays(arrays, names=level_names) + mi.name = d + coords.append(mi) + else: + coords.append(pd.Index(ds[f"_coord_{d}"].values, name=d)) + return coords + + def is_constant(x: SideLike) -> bool: """ Check if the given object is a constant type or an expression type without diff --git a/linopy/constants.py b/linopy/constants.py index f3c05a55..12d1f0a8 100644 --- a/linopy/constants.py +++ b/linopy/constants.py @@ -18,6 +18,11 @@ GREATER_EQUAL = ">=" LESS_EQUAL = "<=" + +class PerformanceWarning(UserWarning): + """Warning raised when an operation triggers expensive Dataset reconstruction.""" + + long_EQUAL = "==" short_GREATER_EQUAL = ">" short_LESS_EQUAL = "<" @@ -211,9 +216,11 @@ def process(cls, status: str, termination_condition: str) -> "Status": @classmethod def from_termination_condition( - cls, 
termination_condition: Union["TerminationCondition", str] + cls, termination_condition: Union["TerminationCondition", str, None] ) -> "Status": - termination_condition = TerminationCondition.process(termination_condition) + termination_condition = TerminationCondition.process( + termination_condition if termination_condition is not None else "unknown" + ) solver_status = SolverStatus.from_termination_condition(termination_condition) return cls(solver_status, termination_condition) diff --git a/linopy/constraints.py b/linopy/constraints.py index d3ebef19..776d8b12 100644 --- a/linopy/constraints.py +++ b/linopy/constraints.py @@ -7,6 +7,8 @@ from __future__ import annotations import functools +import warnings +from abc import ABC, abstractmethod from collections.abc import Callable, Hashable, ItemsView, Iterator, Sequence from dataclasses import dataclass from itertools import product @@ -31,10 +33,13 @@ from linopy.common import ( LabelPositionIndex, LocIndexer, + VariableLabelIndex, align_lines_by_delimiter, assign_multiindex_safe, check_has_nulls, check_has_nulls_polars, + coords_from_dataset, + coords_to_dataset_vars, filter_nulls_polars, format_string_as_variable_name, generate_indices_for_printout, @@ -60,6 +65,7 @@ HELPER_DIMS, LESS_EQUAL, TERM_DIM, + PerformanceWarning, SIGNS_pretty, ) from linopy.types import ( @@ -73,6 +79,7 @@ if TYPE_CHECKING: from linopy.model import Model + FILL_VALUE = {"labels": -1, "rhs": np.nan, "coeffs": 0, "vars": -1, "sign": "="} @@ -96,199 +103,237 @@ def _conwrap(con: Constraint, *args: Any, **kwargs: Any) -> Constraint: return _conwrap -def _con_unwrap(con: Constraint | Dataset) -> Dataset: - return con.data if isinstance(con, Constraint) else con +def _con_unwrap(con: ConstraintBase | Dataset) -> Dataset: + return con.data if isinstance(con, ConstraintBase) else con -class Constraint: +class ConstraintBase(ABC): """ - Projection to a single constraint in a model. + Abstract base class for Constraint and MutableConstraint. 
- The Constraint class is a subclass of xr.DataArray hence most xarray - functions can be applied to it. + Provides all read-only properties and methods shared by both the immutable + Constraint (CSR-backed) and the mutable MutableConstraint (Dataset-backed). """ - __slots__ = ("_data", "_model", "_assigned") - _fill_value = FILL_VALUE - def __init__( - self, - data: Dataset, - model: Model, - name: str = "", - skip_broadcast: bool = False, - ) -> None: - """ - Initialize the Constraint. + @property + @abstractmethod + def data(self) -> Dataset: + """Get the underlying xarray Dataset representation.""" - Parameters - ---------- - labels : xarray.DataArray - labels of the constraint. - model : linopy.Model - Underlying model. - name : str - Name of the constraint. - """ + @property + @abstractmethod + def model(self) -> Model: + """Get the model reference.""" - from linopy.model import Model + @property + @abstractmethod + def name(self) -> str: + """Get the constraint name.""" - if not isinstance(data, Dataset): - raise ValueError(f"data must be a Dataset, got {type(data)}") + @property + @abstractmethod + def is_assigned(self) -> bool: + """Whether the constraint has been assigned labels by the model.""" - if not isinstance(model, Model): - raise ValueError(f"model must be a Model, got {type(model)}") + @property + @abstractmethod + def labels(self) -> DataArray: + """Get the labels DataArray.""" - # check that `labels`, `lower` and `upper`, `sign` and `mask` are in data - for attr in ("coeffs", "vars", "sign", "rhs"): - if attr not in data: - raise ValueError(f"missing '{attr}' in data") + @property + @abstractmethod + def coeffs(self) -> DataArray: + """Get the LHS coefficients DataArray.""" - data = data.assign_attrs(name=name) + @property + @abstractmethod + def vars(self) -> DataArray: + """Get the LHS variable labels DataArray.""" - if not skip_broadcast: - (data,) = xr.broadcast(data, exclude=[TERM_DIM]) + @property + @abstractmethod + def sign(self) -> 
DataArray: + """Get the constraint sign DataArray.""" - self._assigned = "labels" in data - self._data = data - self._model = model + @property + @abstractmethod + def rhs(self) -> DataArray: + """Get the RHS DataArray.""" + + @property + @abstractmethod + def dual(self) -> DataArray: + """Get the dual values DataArray.""" + + @dual.setter + @abstractmethod + def dual(self, value: DataArray) -> None: + """Set the dual values DataArray.""" + + @abstractmethod + def has_variable(self, variable: variables.Variable) -> bool: + """Check if the constraint references any of the given variable labels.""" + + @abstractmethod + def sanitize_zeros(self) -> ConstraintBase: + """Remove terms with zero or near-zero coefficients.""" + + @abstractmethod + def sanitize_missings(self) -> ConstraintBase: + """Mask out rows where all variables are missing (-1).""" + + @abstractmethod + def sanitize_infinities(self) -> ConstraintBase: + """Mask out rows with invalid infinite RHS values.""" + + @abstractmethod + def to_polars(self) -> pl.DataFrame: + """Convert constraint to a polars DataFrame.""" + + @abstractmethod + def freeze(self) -> CSRConstraint: + """Return an immutable Constraint (CSR-backed).""" + + @abstractmethod + def mutable(self) -> Constraint: + """Return a mutable MutableConstraint.""" + + @abstractmethod + def to_matrix_with_rhs( + self, label_index: VariableLabelIndex + ) -> tuple[scipy.sparse.csr_array, np.ndarray, np.ndarray, np.ndarray]: + """ + Return (csr, con_labels, b, sense) in one pass. + + Avoids computing the CSR matrix twice when both the matrix and + the RHS/sense vectors are needed. + """ def __getitem__( self, selector: str | int | slice | list | tuple | dict ) -> Constraint: """ Get selection from the constraint. - This is a wrapper around the xarray __getitem__ method. It returns a - new object with the selected data. + Returns a MutableConstraint with the selected data. 
""" - data = Dataset({k: self.data[k][selector] for k in self.data}, attrs=self.attrs) - return self.__class__(data, self.model, self.name) + data = Dataset( + {k: self.data[k][selector] for k in self.data}, attrs=self.data.attrs + ) + return Constraint(data, self.model, self.name) @property def attrs(self) -> dict[str, Any]: - """ - Get the attributes of the constraint. - """ + """Get the attributes of the constraint.""" return self.data.attrs @property def coords(self) -> DatasetCoordinates: - """ - Get the coordinates of the constraint. - """ + """Get the coordinates of the constraint.""" return self.data.coords @property def indexes(self) -> Indexes: - """ - Get the indexes of the constraint. - """ + """Get the indexes of the constraint.""" return self.data.indexes @property def dims(self) -> Frozen[Hashable, int]: - """ - Get the dimensions of the constraint. - """ + """Get the dimensions of the constraint.""" return self.data.dims @property def sizes(self) -> Frozen[Hashable, int]: - """ - Get the sizes of the constraint. - """ + """Get the sizes of the constraint.""" return self.data.sizes @property def nterm(self) -> int: - """ - Get the number of terms in the constraint. - """ - return self.lhs.nterm + """Get the number of terms in the constraint.""" + return self.data.sizes.get(TERM_DIM, 1) @property def ndim(self) -> int: - """ - Get the number of dimensions of the constraint. - """ + """Get the number of dimensions of the constraint.""" return self.rhs.ndim @property def shape(self) -> tuple[int, ...]: - """ - Get the shape of the constraint. - """ + """Get the shape of the constraint.""" return self.rhs.shape @property def size(self) -> int: - """ - Get the size of the constraint. - """ + """Get the size of the constraint.""" return self.rhs.size @property - def loc(self) -> LocIndexer: - return LocIndexer(self) - - @property - def data(self) -> Dataset: + def ncons(self) -> int: """ - Get the underlying DataArray. 
+ Get the number of active constraints (non-masked, with at least one valid variable). """ - return self._data + labels = self.labels.values + vars_arr = self.vars.values + if labels.ndim == 0: + return int(labels != FILL_VALUE["labels"] and (vars_arr != -1).any()) + return int( + ((labels != FILL_VALUE["labels"]) & (vars_arr != -1).any(axis=-1)).sum() + ) @property - def labels(self) -> DataArray: - """ - Get the labels of the constraint. - """ - return self.data.get("labels", DataArray([])) + def coord_dims(self) -> tuple[Hashable, ...]: + return tuple(k for k in self.dims if k not in HELPER_DIMS) @property - def model(self) -> Model: - """ - Get the model of the constraint. - """ - return self._model + def coord_sizes(self) -> dict[Hashable, int]: + return {k: v for k, v in self.sizes.items() if k not in HELPER_DIMS} @property - def name(self) -> str: - """ - Return the name of the constraint. - """ - return self.attrs["name"] + def coord_names(self) -> list[str]: + """Get the names of the coordinates.""" + return get_dims_with_index_levels(self.data, self.coord_dims) @property - def coord_dims(self) -> tuple[Hashable, ...]: - return tuple(k for k in self.dims if k not in HELPER_DIMS) + def type(self) -> str: + """Get the type string of the constraint.""" + return "Constraint" if self.is_assigned else "Constraint (unassigned)" @property - def coord_sizes(self) -> dict[Hashable, int]: - return {k: v for k, v in self.sizes.items() if k not in HELPER_DIMS} + def term_dim(self) -> str: + """Return the term dimension of the constraint.""" + return TERM_DIM @property - def coord_names(self) -> list[str]: + def mask(self) -> DataArray | None: """ - Get the names of the coordinates. + Get the mask of the constraint. + + The mask indicates on which coordinates the constraint is enabled + (True) and disabled (False). 
""" - return get_dims_with_index_levels(self.data, self.coord_dims) + if self.is_assigned: + result: DataArray = self.labels != FILL_VALUE["labels"] # type: ignore[assignment] + return result.astype(bool) + return None @property - def is_assigned(self) -> bool: - return self._assigned + def lhs(self) -> expressions.LinearExpression: + """Get the left-hand-side linear expression of the constraint.""" + data = self.data[["coeffs", "vars"]].rename({self.term_dim: TERM_DIM}) + return expressions.LinearExpression(data, self.model) + + def __contains__(self, value: Any) -> bool: + return self.data.__contains__(value) def __repr__(self) -> str: - """ - Print the constraint arrays. - """ + """Print the constraint arrays.""" max_lines = options["display_max_rows"] dims = list(self.coord_sizes.keys()) ndim = len(dims) dim_names = self.coord_names dim_sizes = list(self.coord_sizes.values()) - size = np.prod(dim_sizes) # that the number of theoretical printouts + size = np.prod(dim_sizes) masked_entries = (~self.mask).sum().values if self.mask is not None else 0 lines = [] @@ -318,87 +363,695 @@ def __repr__(self) -> str: lines.append(line) lines = align_lines_by_delimiter(lines, list(SIGNS_pretty.values())) - shape_str = ", ".join(f"{d}: {s}" for d, s in zip(dim_names, dim_sizes)) + shape_str = ", ".join(f"{d}: {s}" for d, s in zip(dim_names, dim_sizes)) + mask_str = f" - {masked_entries} masked entries" if masked_entries else "" + underscore = "-" * (len(shape_str) + len(mask_str) + len(header_string) + 4) + lines.insert(0, f"{header_string} [{shape_str}]{mask_str}:\n{underscore}") + elif size == 1: + expr = print_single_expression( + self.coeffs.values, self.vars.values, 0, self.model + ) + lines.append( + f"{header_string}\n{'-' * len(header_string)}\n{expr} {SIGNS_pretty[self.sign.item()]} {self.rhs.item()}" + ) + else: + lines.append(f"{header_string}\n{'-' * len(header_string)}\n") + + return "\n".join(lines) + + def print(self, display_max_rows: int = 20, 
display_max_terms: int = 20) -> None: + """ + Print the linear expression. + + Parameters + ---------- + display_max_rows : int + Maximum number of rows to be displayed. + display_max_terms : int + Maximum number of terms to be displayed. + """ + with options as opts: + opts.set_value( + display_max_rows=display_max_rows, display_max_terms=display_max_terms + ) + print(self) + + @property + def flat(self) -> pd.DataFrame: + """ + Convert the constraint to a pandas DataFrame. + + The resulting DataFrame represents a long table format of the all + non-masked constraints with non-zero coefficients. It contains the + columns `labels`, `coeffs`, `vars`, `rhs`, `sign`. + """ + ds = self.data + + def mask_func(data: dict) -> pd.Series: + mask = (data["vars"] != -1) & (data["coeffs"] != 0) + if "labels" in data: + mask &= data["labels"] != -1 + return mask + + df = to_dataframe(ds, mask_func=mask_func) + + # Group repeated variables in the same constraint + agg_custom = {k: "first" for k in list(df.columns)} + agg_standards = dict(coeffs="sum", rhs="first", sign="first") + agg = {**agg_custom, **agg_standards} + df = df.groupby(["labels", "vars"], as_index=False).aggregate(agg) + check_has_nulls(df, name=f"{self.type} {self.name}") + return df + + def to_matrix( + self, label_index: VariableLabelIndex + ) -> tuple[scipy.sparse.csr_array, np.ndarray]: + """ + Construct a dense CSR matrix for this constraint. + + Only active (non-masked) rows are included. Column indices are dense + positions in the active variable array, as given by ``label_index``. + + Parameters + ---------- + label_index : VariableLabelIndex + Variable label index providing ``label_to_pos`` and ``n_active_vars``. + + Returns + ------- + csr : scipy.sparse.csr_array + Shape (n_active_cons, n_active_vars). + con_labels : np.ndarray + Active constraint labels in row order. 
+ """ + label_to_pos = label_index.label_to_pos + labels_flat = self.labels.values.ravel() + vars_vals = self.vars.values + n_rows = len(labels_flat) + vars_2d = ( + vars_vals.reshape(n_rows, -1) + if n_rows > 0 + else vars_vals.reshape(0, max(1, vars_vals.size)) + ) + + # Single boolean mask combining both filter conditions + row_mask = (labels_flat != -1) & (vars_2d != -1).any(axis=1) + con_labels = labels_flat[row_mask] + vars_final = vars_2d[row_mask] + valid_final = vars_final != -1 + + coeffs_final = self.coeffs.values.ravel().reshape(vars_2d.shape)[row_mask] + + cols = label_to_pos[vars_final[valid_final]] + data = coeffs_final[valid_final] + + counts = valid_final.sum(axis=1) + n_active_cons = len(con_labels) + indptr = np.empty(n_active_cons + 1, dtype=np.int32) + indptr[0] = 0 + np.cumsum(counts, out=indptr[1:]) + + csr = scipy.sparse.csr_array( + (data, cols, indptr), shape=(n_active_cons, label_index.n_active_vars) + ) + csr.sum_duplicates() + return csr, con_labels + + def to_netcdf_ds(self) -> Dataset: + """Return a Dataset representation suitable for netcdf serialization.""" + return self.data + + iterate_slices = iterate_slices + + +class CSRConstraint(ConstraintBase): + """ + Frozen constraint backed by a CSR sparse matrix. + + Parameters + ---------- + csr : scipy.sparse.csr_array + Shape (n_flat, model._xCounter). Each row is a flat position in the + constraint grid (including masked/empty rows). + rhs : np.ndarray + Shape (n_flat,). Right-hand-side values. + sign : str or np.ndarray + Constraint sign. Either a single str ('=', '<=', '>=') for uniform + signs, or a per-row np.ndarray of sign strings for mixed signs. + coords : list of pd.Index + One index per coordinate dimension defining the constraint grid. + model : Model + The linopy model this constraint belongs to. + name : str + Name of the constraint. + cindex : int or None + Starting label assigned by the model. None if not yet assigned. + dual : np.ndarray or None + Shape (n_flat,). 
Dual values after solving, or None. + """ + + __slots__ = ( + "_csr", + "_con_labels", + "_rhs", + "_sign", + "_coords", + "_model", + "_name", + "_cindex", + "_dual", + ) + + def __init__( + self, + csr: scipy.sparse.csr_array, + con_labels: np.ndarray, + rhs: np.ndarray, + sign: str | np.ndarray, + coords: list[pd.Index], + model: Model, + name: str = "", + cindex: int | None = None, + dual: np.ndarray | None = None, + ) -> None: + self._csr = csr + self._con_labels = con_labels + self._rhs = rhs + self._sign = sign + self._coords = coords + self._model = model + self._name = name + self._cindex = cindex + self._dual = dual + + @property + def model(self) -> Model: + return self._model + + @property + def name(self) -> str: + return self._name + + @property + def is_assigned(self) -> bool: + return self._cindex is not None + + @property + def shape(self) -> tuple[int, ...]: + return tuple(len(c) for c in self._coords) + + @property + def full_size(self) -> int: + return int(np.prod(shape)) if (shape := self.shape) else 1 + + @property + def range(self) -> tuple[int, int]: + """Return the (start, end) label range of the constraint.""" + if self._cindex is None: + raise AttributeError("Constraint has not been assigned labels yet.") + return (self._cindex, self._cindex + self.full_size) + + @property + def ncons(self) -> int: + return self._csr.shape[0] + + @property + def attrs(self) -> dict[str, Any]: + d: dict[str, Any] = {"name": self._name} + if self._cindex is not None: + d["label_range"] = (self._cindex, self._cindex + self.full_size) + return d + + @property + def dims(self) -> Frozen[Hashable, int]: + d: dict[Hashable, int] = {c.name: len(c) for c in self._coords} + d[TERM_DIM] = self.nterm + return Frozen(d) + + @property + def active_positions(self) -> np.ndarray: + """Flat positions of active (non-masked) rows in the original coord shape.""" + if self._cindex is None: + return np.arange(self._csr.shape[0]) + return self._con_labels - self._cindex + + 
@property + def sizes(self) -> Frozen[Hashable, int]: + return self.dims + + @property + def indexes(self) -> Indexes: + return Indexes({c.name: c for c in self._coords}) + + @property + def nterm(self) -> int: + return int(np.diff(self._csr.indptr).max()) if self._csr.nnz > 0 else 1 + + @property + def coord_names(self) -> list[str]: + return [str(c.name) for c in self._coords] + + def _active_to_dataarray( + self, active_values: np.ndarray, fill: float | int | str = -1 + ) -> DataArray: + full = np.full(self.full_size, fill, dtype=active_values.dtype) + full[self.active_positions] = active_values + return DataArray(full.reshape(self.shape), coords=self._coords) + + @property + def labels(self) -> DataArray: + """Get labels DataArray, shape (*coord_dims).""" + if self._cindex is None: + return DataArray([]) + return self._active_to_dataarray(self._con_labels, fill=-1) + + @property + def coeffs(self) -> DataArray: + """Get coefficients DataArray, shape (*coord_dims, _term).""" + warnings.warn( + "Accessing .coeffs on a Constraint triggers full Dataset reconstruction. " + "Use .to_matrix() for efficient access.", + PerformanceWarning, + stacklevel=2, + ) + return self.data.coeffs + + @property + def vars(self) -> DataArray: + """Get variable labels DataArray, shape (*coord_dims, _term).""" + warnings.warn( + "Accessing .vars on a Constraint triggers full Dataset reconstruction. 
" + "Use .to_matrix() for efficient access.", + PerformanceWarning, + stacklevel=2, + ) + return self.data.vars + + @property + def sign(self) -> DataArray: + """Get sign DataArray.""" + if isinstance(self._sign, str): + return DataArray(np.full(self.shape, self._sign), coords=self._coords) + return self._active_to_dataarray(self._sign, fill="") + + @property + def rhs(self) -> DataArray: + """Get RHS DataArray, shape (*coord_dims).""" + return self._active_to_dataarray(self._rhs, fill=np.nan) + + @rhs.setter + def rhs(self, value: ExpressionLike | VariableLike | ConstantLike) -> None: + self._refreeze_after(lambda mc: setattr(mc, "rhs", value)) + + @property + def lhs(self) -> expressions.LinearExpression: + """Get LHS as LinearExpression (triggers Dataset reconstruction).""" + return self.mutable().lhs + + @lhs.setter + def lhs(self, value: ExpressionLike | VariableLike | ConstantLike) -> None: + self._refreeze_after(lambda mc: setattr(mc, "lhs", value)) + + def _refreeze_after(self, mutate: Callable[[Constraint], None]) -> None: + mc = self.mutable() + mutate(mc) + refrozen = CSRConstraint.from_mutable(mc, self._cindex) + self._csr = refrozen._csr + self._con_labels = refrozen._con_labels + self._rhs = refrozen._rhs + self._sign = refrozen._sign + self._coords = refrozen._coords + self._dual = None + + @property + @has_optimized_model + def dual(self) -> DataArray: + """Get dual values DataArray, shape (*coord_dims).""" + if self._dual is None: + raise AttributeError( + "Underlying is optimized but does not have dual values stored." + ) + return self._active_to_dataarray(self._dual, fill=np.nan) + + @dual.setter + def dual(self, value: DataArray) -> None: + """Set dual values from a DataArray aligned with the full coord shape.""" + vals = np.asarray(value).ravel() + self._dual = vals[self.active_positions] + + def _to_dataset(self, nterm: int) -> Dataset: + """ + Reconstruct labels/coeffs/vars Dataset from the CSR matrix. 
+ + Parameters + ---------- + nterm : int + Number of terms per row (width of the dense term block). + + Returns + ------- + Dataset with variables ``labels``, ``coeffs``, ``vars``. + """ + csr = self._csr + counts = np.diff(csr.indptr) + shape = self.shape + full_size = self.full_size + + # Map active row i -> flat position in full shape via con_labels + active_positions = self.active_positions + coeffs_2d = np.zeros((full_size, nterm), dtype=csr.dtype) + vars_2d = np.full((full_size, nterm), -1, dtype=np.int64) + if csr.nnz > 0: + row_indices = np.repeat(active_positions, counts) + term_cols = np.arange(csr.nnz) - np.repeat(csr.indptr[:-1], counts) + # csr.indices are column positions into vlabels; map back to variable labels + vlabels = self._model.variables.label_index.vlabels + vars_2d[row_indices, term_cols] = vlabels[csr.indices] + coeffs_2d[row_indices, term_cols] = csr.data + + dim_names = self.coord_names + xr_coords = {c.name: c for c in self._coords} + dims_with_term = dim_names + [TERM_DIM] + coeffs_da = DataArray( + coeffs_2d.reshape(shape + (nterm,)), + coords=xr_coords, + dims=dims_with_term, + ) + vars_da = DataArray( + vars_2d.reshape(shape + (nterm,)), + coords=xr_coords, + dims=dims_with_term, + ) + ds = Dataset({"coeffs": coeffs_da, "vars": vars_da}) + if self._cindex is not None: + labels_flat = np.full(full_size, -1, dtype=np.int64) + labels_flat[active_positions] = self._con_labels + ds["labels"] = DataArray(labels_flat.reshape(shape), coords=self._coords) + return ds + + @property + def data(self) -> Dataset: + """Reconstruct the xarray Dataset from the CSR representation.""" + ds = self._to_dataset(self.nterm) + ds = ds.assign(sign=self.sign, rhs=self.rhs) + if self._dual is not None: + ds = ds.assign(dual=self._active_to_dataarray(self._dual, fill=np.nan)) + return ds.assign_attrs(self.attrs) + + def __repr__(self) -> str: + """Print the constraint without reconstructing the full Dataset.""" + max_lines = options["display_max_rows"] + 
coords = self._coords + shape = self.shape + dim_names = self.coord_names + size = self.full_size + # Map active rows (CSR is active-only) back to full flat positions + csr = self._csr + nterm = self.nterm + active_positions = self.active_positions + masked_entries = size - len(active_positions) + + # pos_to_row[flat_idx] -> CSR row index, or -1 if masked + # active_positions is sorted (labels assigned in order) + pos_to_row = np.full(size, -1, dtype=np.intp) + pos_to_row[active_positions] = np.arange(len(active_positions), dtype=np.intp) + + header_string = f"{self.type} `{self._name}`" if self._name else f"{self.type}" + lines = [] + + def row_expr(row: int) -> str: + start, end = int(csr.indptr[row]), int(csr.indptr[row + 1]) + vars_row = np.full(nterm, -1, dtype=np.int64) + coeffs_row = np.zeros(nterm, dtype=csr.dtype) + vars_row[: end - start] = csr.indices[start:end] + coeffs_row[: end - start] = csr.data[start:end] + sign = self._sign if isinstance(self._sign, str) else self._sign[row] + return f"{print_single_expression(coeffs_row, vars_row, 0, self._model)} {SIGNS_pretty[sign]} {self._rhs[row]}" + + if size > 1: + for indices in generate_indices_for_printout(shape, max_lines): + if indices is None: + lines.append("\t\t...") + else: + coord = [coords[i][int(ind)] for i, ind in enumerate(indices)] + flat_idx = int(np.ravel_multi_index(indices, shape)) + row = pos_to_row[flat_idx] + body = row_expr(row) if row >= 0 else "None" + lines.append(print_coord(coord) + f": {body}") + lines = align_lines_by_delimiter(lines, list(SIGNS_pretty.values())) + + shape_str = ", ".join(f"{d}: {s}" for d, s in zip(dim_names, shape)) mask_str = f" - {masked_entries} masked entries" if masked_entries else "" underscore = "-" * (len(shape_str) + len(mask_str) + len(header_string) + 4) lines.insert(0, f"{header_string} [{shape_str}]{mask_str}:\n{underscore}") elif size == 1: - expr = print_single_expression( - self.coeffs.values, self.vars.values, 0, self.model - ) - 
lines.append( - f"{header_string}\n{'-' * len(header_string)}\n{expr} {SIGNS_pretty[self.sign.item()]} {self.rhs.item()}" - ) + body = row_expr(0) if len(active_positions) > 0 else "None" + lines.append(f"{header_string}\n{'-' * len(header_string)}\n{body}") else: lines.append(f"{header_string}\n{'-' * len(header_string)}\n") return "\n".join(lines) - def print(self, display_max_rows: int = 20, display_max_terms: int = 20) -> None: + def to_matrix( + self, label_index: VariableLabelIndex | None = None + ) -> tuple[scipy.sparse.csr_array, np.ndarray]: + """Return the stored CSR matrix and con_labels.""" + return self._csr, self._con_labels + + def to_netcdf_ds(self) -> Dataset: + """Return a Dataset with raw CSR components for netcdf serialization.""" + csr = self._csr + data_vars: dict[str, DataArray] = { + "indptr": DataArray(csr.indptr, dims=["_indptr"]), + "indices": DataArray(csr.indices, dims=["_nnz"]), + "data": DataArray(csr.data, dims=["_nnz"]), + "rhs": DataArray(self._rhs, dims=["_flat"]), + "_con_labels": DataArray(self._con_labels, dims=["_flat"]), + } + if isinstance(self._sign, np.ndarray): + data_vars["_sign"] = DataArray(self._sign, dims=["_flat"]) + data_vars.update(coords_to_dataset_vars(self._coords)) + if self._dual is not None: + data_vars["dual"] = DataArray(self._dual, dims=["_flat"]) + dim_names = [c.name for c in self._coords] + attrs: dict[str, Any] = { + "_linopy_format": "csr", + "cindex": self._cindex if self._cindex is not None else -1, + "shape": list(csr.shape), + "coord_dims": dim_names, + "name": self._name, + } + if isinstance(self._sign, str): + attrs["sign"] = self._sign + return Dataset(data_vars, attrs=attrs) + + @classmethod + def from_netcdf_ds(cls, ds: Dataset, model: Model, name: str) -> CSRConstraint: + """Reconstruct a Constraint from a netcdf Dataset (CSR format).""" + attrs = ds.attrs + shape = tuple(attrs["shape"]) + csr = scipy.sparse.csr_array( + (ds["data"].values, ds["indices"].values, ds["indptr"].values), + 
shape=shape, + ) + rhs = ds["rhs"].values + sign: str | np.ndarray = ds["_sign"].values if "_sign" in ds else attrs["sign"] + _cindex_raw = int(attrs["cindex"]) + cindex: int | None = _cindex_raw if _cindex_raw >= 0 else None + coord_dims = attrs["coord_dims"] + if isinstance(coord_dims, str): + coord_dims = [coord_dims] + coords = coords_from_dataset(ds, coord_dims) + dual = ds["dual"].values if "dual" in ds else None + if "_con_labels" in ds: + con_labels = ds["_con_labels"].values + elif cindex is not None: + con_labels = np.arange(cindex, cindex + len(rhs), dtype=np.intp) + else: + con_labels = np.arange(len(rhs), dtype=np.intp) + return cls( + csr, con_labels, rhs, sign, coords, model, name, cindex=cindex, dual=dual + ) + + def has_variable(self, variable: variables.Variable) -> bool: + vlabels = self._model.variables.label_index.vlabels + return bool( + np.isin(vlabels[self._csr.indices], variable.labels.values.ravel()).any() + ) + + def to_matrix_with_rhs( + self, label_index: VariableLabelIndex + ) -> tuple[scipy.sparse.csr_array, np.ndarray, np.ndarray, np.ndarray]: + """Return (csr, con_labels, b, sense) — all pre-stored, no recomputation.""" + if isinstance(self._sign, str): + sense = np.full(len(self._rhs), self._sign[0]) + else: + sense = np.array([s[0] for s in self._sign]) + return self._csr, self._con_labels, self._rhs, sense + + def sanitize_zeros(self) -> CSRConstraint: + """Remove terms with zero or near-zero coefficients (mutates in-place).""" + self._csr.data[np.abs(self._csr.data) <= 1e-10] = 0 + self._csr.eliminate_zeros() + return self + + def sanitize_missings(self) -> CSRConstraint: + """No-op: missing rows are already excluded during freezing.""" + return self + + def sanitize_infinities(self) -> CSRConstraint: + """Mask out rows with invalid infinite RHS values (mutates in-place).""" + if isinstance(self._sign, str): + if self._sign == LESS_EQUAL: + invalid = self._rhs == np.inf + elif self._sign == GREATER_EQUAL: + invalid = self._rhs 
== -np.inf + else: + return self + else: + invalid = ((self._sign == LESS_EQUAL) & (self._rhs == np.inf)) | ( + (self._sign == GREATER_EQUAL) & (self._rhs == -np.inf) + ) + if not invalid.any(): + return self + keep = ~invalid + self._csr = self._csr[keep] + self._con_labels = self._con_labels[keep] + self._rhs = self._rhs[keep] + if not isinstance(self._sign, str): + self._sign = self._sign[keep] + return self + + def freeze(self) -> CSRConstraint: + """Return self (already immutable).""" + return self + + def mutable(self) -> Constraint: + """Convert to a mutable Constraint.""" + return Constraint(self.data, self._model, self._name) + + def to_polars(self) -> pl.DataFrame: + """Convert to polars DataFrame — delegates to mutable().""" + return self.mutable().to_polars() + + @classmethod + def from_mutable( + cls, + con: Constraint, + cindex: int | None = None, + ) -> CSRConstraint: """ - Print the linear expression. + Create a CSRConstraint from a mutable Constraint. Parameters ---------- - display_max_rows : int - Maximum number of rows to be displayed. - display_max_terms : int - Maximum number of terms to be displayed. - """ - with options as opts: - opts.set_value( - display_max_rows=display_max_rows, display_max_terms=display_max_terms - ) - print(self) + con : Constraint + cindex : int or None + Starting label index, if assigned.
+ """ + label_index = con.model.variables.label_index + csr, con_labels = con.to_matrix(label_index) + coords = [con.indexes[d] for d in con.coord_dims] + # Build active_mask aligned with con_labels (rows in csr) + # Use same filter as to_matrix: label != -1 AND at least one var != -1 + labels_flat = con.labels.values.ravel() + vars_flat = con.vars.values.reshape(len(labels_flat), -1) + active_mask = (labels_flat != -1) & (vars_flat != -1).any(axis=1) + rhs = con.rhs.values.ravel()[active_mask] + sign_vals = con.sign.values.ravel() + active_signs = sign_vals[active_mask] + unique_signs = np.unique(active_signs) + if len(unique_signs) == 0: + sign: str | np.ndarray = "=" + elif len(unique_signs) == 1: + sign = str(unique_signs[0]) + else: + sign = active_signs + dual = ( + con.data["dual"].values.ravel()[active_mask] if "dual" in con.data else None + ) + return cls( + csr, + con_labels, + rhs, + sign, + coords, + con.model, + con.name, + cindex=cindex, + dual=dual, + ) - def __contains__(self, value: Any) -> bool: - return self.data.__contains__(value) + +class Constraint(ConstraintBase): + """ + Mutable constraint backed by an xarray Dataset. + + This is the original Constraint implementation, renamed to MutableConstraint. + Supports setters, xarray operations via conwrap, and from_rule construction. 
+ """ + + __slots__ = ("_data", "_model", "_assigned") + + def __init__( + self, + data: Dataset, + model: Model, + name: str = "", + skip_broadcast: bool = False, + ) -> None: + from linopy.model import Model + + if not isinstance(data, Dataset): + raise ValueError(f"data must be a Dataset, got {type(data)}") + + if not isinstance(model, Model): + raise ValueError(f"model must be a Model, got {type(model)}") + + for attr in ("coeffs", "vars", "sign", "rhs"): + if attr not in data: + raise ValueError(f"missing '{attr}' in data") + + data = data.assign_attrs(name=name) + + if not skip_broadcast: + (data,) = xr.broadcast(data, exclude=[TERM_DIM]) + + self._assigned = "labels" in data + self._data = data + self._model = model @property - def type(self) -> str: - """ - Get the type of the constraint. - """ - return "Constraint" if self.is_assigned else "Constraint (unassigned)" + def data(self) -> Dataset: + return self._data @property - def range(self) -> tuple[int, int]: - """ - Return the range of the constraint. - """ - return self.data.attrs["label_range"] + def model(self) -> Model: + return self._model @property - def term_dim(self) -> str: - """ - Return the term dimension of the constraint. - """ - return TERM_DIM + def name(self) -> str: + return self.attrs["name"] @property - def mask(self) -> DataArray | None: - """ - Get the mask of the constraint. + def is_assigned(self) -> bool: + return self._assigned - The mask indicates on which coordinates the constraint is enabled - (True) and disabled (False). + @property + def range(self) -> tuple[int, int]: + """Return the range of the constraint.""" + return self.data.attrs["label_range"] - Returns - ------- - xr.DataArray - """ - if self.is_assigned: - return (self.data.labels != FILL_VALUE["labels"]).astype(bool) - return None + @property + def loc(self) -> LocIndexer: + return LocIndexer(self) @property - def coeffs(self) -> DataArray: - """ - Get the left-hand-side coefficients of the constraint. 
+ def labels(self) -> DataArray: + return self.data.get("labels", DataArray([])) - The function raises an error in case no model is set as a - reference. - """ + @property + def coeffs(self) -> DataArray: return self.data.coeffs @coeffs.setter @@ -408,12 +1061,6 @@ def coeffs(self, value: ConstantLike) -> None: @property def vars(self) -> DataArray: - """ - Get the left-hand-side variables of the constraint. - - The function raises an error in case no model is set as a - reference. - """ return self.data.vars @vars.setter @@ -425,34 +1072,8 @@ def vars(self, value: variables.Variable | DataArray) -> None: value = value.broadcast_like(self.coeffs, exclude=[self.term_dim]) self._data = assign_multiindex_safe(self.data, vars=value) - @property - def lhs(self) -> expressions.LinearExpression: - """ - Get the left-hand-side linear expression of the constraint. - - The function raises an error in case no model is set as a - reference. - """ - data = self.data[["coeffs", "vars"]].rename({self.term_dim: TERM_DIM}) - return expressions.LinearExpression(data, self.model) - - @lhs.setter - def lhs(self, value: ExpressionLike | VariableLike | ConstantLike) -> None: - value = expressions.as_expression( - value, self.model, coords=self.coords, dims=self.coord_dims - ) - self._data = self.data.drop_vars(["coeffs", "vars"]).assign( - coeffs=value.coeffs, vars=value.vars, rhs=self.rhs - value.const - ) - @property def sign(self) -> DataArray: - """ - Get the signs of the constraint. - - The function raises an error in case no model is set as a - reference. - """ return self.data.sign @sign.setter @@ -463,12 +1084,6 @@ def sign(self, value: SignLike) -> None: @property def rhs(self) -> DataArray: - """ - Get the right hand side constants of the constraint. - - The function raises an error in case no model is set as a - reference. 
- """ return self.data.rhs @rhs.setter @@ -479,15 +1094,23 @@ def rhs(self, value: ExpressionLike) -> None: self.lhs = self.lhs - value.reset_const() self._data = assign_multiindex_safe(self.data, rhs=value.const) + @property + def lhs(self) -> expressions.LinearExpression: + data = self.data[["coeffs", "vars"]].rename({self.term_dim: TERM_DIM}) + return expressions.LinearExpression(data, self.model) + + @lhs.setter + def lhs(self, value: ExpressionLike | VariableLike | ConstantLike) -> None: + value = expressions.as_expression( + value, self.model, coords=self.coords, dims=self.coord_dims + ) + self._data = self.data.drop_vars(["coeffs", "vars"]).assign( + coeffs=value.coeffs, vars=value.vars, rhs=self.rhs - value.const + ) + @property @has_optimized_model def dual(self) -> DataArray: - """ - Get the dual values of the constraint. - - The function raises an error in case no model is set as a - reference or the model status is not okay. - """ if "dual" not in self.data: raise AttributeError( "Underlying is optimized but does not have dual values stored." @@ -496,12 +1119,60 @@ def dual(self) -> DataArray: @dual.setter def dual(self, value: ConstantLike) -> None: - """ - Get the dual values of the constraint. 
- """ value = DataArray(value).broadcast_like(self.labels) self._data = assign_multiindex_safe(self.data, dual=value) + def has_variable(self, variable: variables.Variable) -> bool: + return bool(self.data["vars"].isin(variable.labels.values.ravel()).any()) + + def to_matrix_with_rhs( + self, label_index: VariableLabelIndex + ) -> tuple[scipy.sparse.csr_array, np.ndarray, np.ndarray, np.ndarray]: + """Return (csr, con_labels, b, sense) in one pass.""" + csr, con_labels = self.to_matrix(label_index) + nonempty = np.diff(csr.indptr).astype(bool) + active_rows = np.flatnonzero(nonempty) + b = self.rhs.values.ravel()[active_rows] + sign_flat = self.sign.values.ravel()[active_rows] + sense = np.array([s[0] for s in sign_flat]) + return csr, con_labels, b, sense + + def sanitize_zeros(self) -> Constraint: + """Remove terms with zero or near-zero coefficients.""" + not_zero = abs(self.coeffs) > 1e-10 + self.vars = self.vars.where(not_zero, -1) + self.coeffs = self.coeffs.where(not_zero) + return self + + def sanitize_missings(self) -> Constraint: + """Mask out rows where all variables are missing (-1).""" + contains_non_missing = (self.vars != -1).any(self.term_dim) + labels = self.labels.where(contains_non_missing, -1) + self._data = assign_multiindex_safe(self.data, labels=labels) + return self + + def sanitize_infinities(self) -> Constraint: + """Mask out rows with invalid infinite RHS values.""" + valid_infinity_values = ((self.sign == LESS_EQUAL) & (self.rhs == np.inf)) | ( + (self.sign == GREATER_EQUAL) & (self.rhs == -np.inf) + ) + labels = self.labels.where(~valid_infinity_values, -1) + self._data = assign_multiindex_safe(self.data, labels=labels) + return self + + def freeze(self) -> CSRConstraint: + """Convert to an immutable Constraint.""" + cindex = ( + int(self.data.attrs["label_range"][0]) + if "label_range" in self.data.attrs + else None + ) + return CSRConstraint.from_mutable(self, cindex=cindex) + + def mutable(self) -> Constraint: + """Return self 
(already mutable).""" + return self + @classmethod def from_rule(cls, model: Model, rule: Callable, coords: CoordsLike) -> Constraint: """ @@ -510,7 +1181,6 @@ def from_rule(cls, model: Model, rule: Callable, coords: CoordsLike) -> Constrai This functionality mirrors the assignment of constraints as done by Pyomo. - Parameters ---------- model : linopy.Model @@ -528,14 +1198,13 @@ def from_rule(cls, model: Model, rule: Callable, coords: CoordsLike) -> Constrai The order and size of coords has to be same as the argument list followed by `model` in function `rule`. - Returns ------- - linopy.Constraint + linopy.MutableConstraint Examples -------- - >>> from linopy import Model, LinearExpression, Constraint + >>> from linopy import Model, LinearExpression, MutableConstraint >>> m = Model() >>> coords = pd.RangeIndex(10), ["a", "b"] >>> x = m.add_variables(0, 100, coords) @@ -545,14 +1214,13 @@ def from_rule(cls, model: Model, rule: Callable, coords: CoordsLike) -> Constrai ... else: ... return i * x.at[i, j] >= 0 ... - >>> con = Constraint.from_rule(m, bound, coords) + >>> con = MutableConstraint.from_rule(m, bound, coords) >>> con = m.add_constraints(con) """ if not isinstance(coords, DataArrayCoordinates): coords = DataArray(coords=coords).coords shape = list(map(len, coords.values())) - # test output type output = rule(model, *[c.values[0] for c in coords.values()]) if not isinstance(output, AnonymousScalarConstraint) and output is not None: msg = f"`rule` has to return AnonymousScalarConstraint not {type(output)}." @@ -572,37 +1240,6 @@ def from_rule(cls, model: Model, rule: Callable, coords: CoordsLike) -> Constrai data = lhs.data.assign(sign=sign, rhs=rhs) return cls(data, model=model) - @property - def flat(self) -> pd.DataFrame: - """ - Convert the constraint to a pandas DataFrame. - - The resulting DataFrame represents a long table format of the all - non-masked constraints with non-zero coefficients. 
It contains the - columns `labels`, `coeffs`, `vars`, `rhs`, `sign`. - - Returns - ------- - df : pandas.DataFrame - """ - ds = self.data - - def mask_func(data: pd.DataFrame) -> pd.Series: - mask = (data["vars"] != -1) & (data["coeffs"] != 0) - if "labels" in data: - mask &= data["labels"] != -1 - return mask - - df = to_dataframe(ds, mask_func=mask_func) - - # Group repeated variables in the same constraint - agg_custom = {k: "first" for k in list(df.columns)} - agg_standards = dict(coeffs="sum", rhs="first", sign="first") - agg = {**agg_custom, **agg_standards} - df = df.groupby(["labels", "vars"], as_index=False).aggregate(agg) - check_has_nulls(df, name=f"{self.type} {self.name}") - return df - def to_polars(self) -> pl.DataFrame: """ Convert the constraint to a polars DataFrame. @@ -625,8 +1262,6 @@ def to_polars(self) -> pl.DataFrame: long = maybe_group_terms_polars(long) check_has_nulls_polars(long, name=f"{self.type} {self.name}") - # Build short DataFrame (labels, rhs, sign) without xarray broadcast. - # Apply labels mask directly instead of filter_nulls_polars. 
labels_flat = ds["labels"].values.reshape(-1) mask = labels_flat != -1 labels_masked = labels_flat[mask] @@ -655,55 +1290,29 @@ def to_polars(self) -> pl.DataFrame: df = long.join(short, on="labels", how="inner") return df[["labels", "coeffs", "vars", "sign", "rhs"]] - # Wrapped function which would convert variable to dataarray + # Wrapped xarray methods — only available on MutableConstraint assign = conwrap(Dataset.assign) - assign_multiindex_safe = conwrap(assign_multiindex_safe) - assign_attrs = conwrap(Dataset.assign_attrs) - assign_coords = conwrap(Dataset.assign_coords) - - # bfill = conwrap(Dataset.bfill) - broadcast_like = conwrap(Dataset.broadcast_like) - chunk = conwrap(Dataset.chunk) - drop_sel = conwrap(Dataset.drop_sel) - drop_isel = conwrap(Dataset.drop_isel) - expand_dims = conwrap(Dataset.expand_dims) - - # ffill = conwrap(Dataset.ffill) - sel = conwrap(Dataset.sel) - isel = conwrap(Dataset.isel) - shift = conwrap(Dataset.shift) - swap_dims = conwrap(Dataset.swap_dims) - set_index = conwrap(Dataset.set_index) - - reindex = conwrap(Dataset.reindex, fill_value=_fill_value) - - reindex_like = conwrap(Dataset.reindex_like, fill_value=_fill_value) - + reindex = conwrap(Dataset.reindex, fill_value=FILL_VALUE) + reindex_like = conwrap(Dataset.reindex_like, fill_value=FILL_VALUE) rename = conwrap(Dataset.rename) - rename_dims = conwrap(Dataset.rename_dims) - roll = conwrap(Dataset.roll) - stack = conwrap(Dataset.stack) - unstack = conwrap(Dataset.unstack) - iterate_slices = iterate_slices - @dataclass(repr=False) class Constraints: @@ -711,7 +1320,7 @@ class Constraints: A constraint container used for storing multiple constraint arrays. """ - data: dict[str, Constraint] + data: dict[str, ConstraintBase] model: Model _label_position_index: LabelPositionIndex | None = None @@ -752,17 +1361,17 @@ def __repr__(self) -> str: return r @overload - def __getitem__(self, names: str) -> Constraint: ... + def __getitem__(self, names: str) -> ConstraintBase: ... 
@overload def __getitem__(self, names: list[str]) -> Constraints: ... - def __getitem__(self, names: str | list[str]) -> Constraint | Constraints: + def __getitem__(self, names: str | list[str]) -> ConstraintBase | Constraints: if isinstance(names, str): return self.data[names] return Constraints({name: self.data[name] for name in names}, self.model) - def __getattr__(self, name: str) -> Constraint: + def __getattr__(self, name: str) -> ConstraintBase: # If name is an attribute of self (including methods and properties), return that if name in self.data: return self.data[name] @@ -792,7 +1401,7 @@ def __len__(self) -> int: def __iter__(self) -> Iterator[str]: return self.data.__iter__() - def items(self) -> ItemsView[str, Constraint]: + def items(self) -> ItemsView[str, ConstraintBase]: return self.data.items() def _ipython_key_completions_(self) -> list[str]: @@ -805,12 +1414,15 @@ def _ipython_key_completions_(self) -> list[str]: """ return list(self) - def add(self, constraint: Constraint) -> None: + def add(self, constraint: ConstraintBase, freeze: bool = False) -> ConstraintBase: """ - Add a constraint to the constraints constrainer. + Add a constraint to the constraints container. """ + if freeze and isinstance(constraint, Constraint): + constraint = constraint.freeze() self.data[constraint.name] = constraint self._invalidate_label_position_index() + return constraint def remove(self, name: str) -> None: """ @@ -897,33 +1509,7 @@ def ncons(self) -> int: This excludes constraints with missing labels or where all variables are masked (vars == -1). 
""" - total = 0 - for con in self.data.values(): - labels = con.labels.values - vars_arr = con.vars.values - - # Handle scalar constraint (single constraint, labels is 0-d) - if labels.ndim == 0: - # Scalar: valid if label != -1 and any var != -1 - if labels != -1 and (vars_arr != -1).any(): - total += 1 - continue - - # Array constraint: labels has constraint dimensions, vars has - # constraint dimensions + _term dimension - valid_labels = labels != -1 - - # Check if any variable in each constraint is valid (not -1) - # vars has shape (..., n_terms) where ... matches labels shape - has_valid_var = (vars_arr != -1).any(axis=-1) - - active = valid_labels & has_valid_var - - if con.mask is not None: - active = active & con.mask.values - - total += int(active.sum()) - return total + return sum(con.ncons for con in self.data.values()) @property def inequalities(self) -> Constraints: @@ -943,34 +1529,27 @@ def sanitize_zeros(self) -> None: """ Filter out terms with zero and close-to-zero coefficient. """ - for name in self: - not_zero = abs(self[name].coeffs) > 1e-10 - con = self[name] - con.vars = self[name].vars.where(not_zero, -1) - con.coeffs = self[name].coeffs.where(not_zero) + for con in self.data.values(): + con.sanitize_zeros() def sanitize_missings(self) -> None: """ Set constraints labels to -1 where all variables in the lhs are missing. """ - for name in self: - con = self[name] - contains_non_missing = (con.vars != -1).any(con.term_dim) - labels = self[name].labels.where(contains_non_missing, -1) - con._data = assign_multiindex_safe(con.data, labels=labels) + for con in self.data.values(): + con.sanitize_missings() def sanitize_infinities(self) -> None: """ - Replace infinite values in the constraints with a large value. + Remove constraints whose RHS is an invalid infinity. + + Constraints with ``rhs == inf`` and sign ``<=``, or ``rhs == -inf`` + and sign ``>=``, are trivially satisfied and are masked out (label set + to -1). 
""" - for name in self: - con = self[name] - valid_infinity_values = ((con.sign == LESS_EQUAL) & (con.rhs == np.inf)) | ( - (con.sign == GREATER_EQUAL) & (con.rhs == -np.inf) - ) - labels = con.labels.where(~valid_infinity_values, -1) - con._data = assign_multiindex_safe(con.data, labels=labels) + for con in self.data.values(): + con.sanitize_infinities() def get_name_by_label(self, label: int | float) -> str: """ @@ -1056,6 +1635,8 @@ def set_blocks(self, block_map: np.ndarray) -> None: N = block_map.max() for name, constraint in self.items(): + if not isinstance(constraint, Constraint): + self.data[name] = constraint = constraint.mutable() res = xr.full_like(constraint.labels, N + 1, dtype=block_map.dtype) entries = replace_by_map(constraint.vars, block_map) @@ -1091,39 +1672,58 @@ def flat(self) -> pd.DataFrame: df["key"] = df.labels.map(map_labels) return df - def to_matrix(self, filter_missings: bool = True) -> scipy.sparse.csc_matrix: + def to_matrix(self) -> tuple[scipy.sparse.csr_array, np.ndarray]: """ - Construct a constraint matrix in sparse format. + Construct a constraint matrix in sparse format by stacking per-constraint CSR matrices. - Missing values, i.e. -1 in labels and vars, are ignored filtered - out. - """ - # TODO: rename "filter_missings" to "~labels_as_coordinates" - cons = self.flat + Each per-constraint CSR is already dense: rows are active constraints + only, column indices are dense variable positions (not raw labels). + Shape is ``(n_active_cons, n_active_vars)``. + Returns + ------- + matrix : scipy.sparse.csr_array + Shape ``(n_active_cons, n_active_vars)``. + con_labels : np.ndarray + Shape ``(n_active_cons,)``, maps each matrix row to the + original constraint label. 
+ """ if not len(self): raise ValueError("No constraints available to convert to matrix.") - if filter_missings: - vars = self.model.variables.flat - shape = (cons.key.max() + 1, vars.key.max() + 1) - cons["vars"] = cons.vars.map(vars.set_index("labels").key) - return scipy.sparse.csc_matrix( - (cons.coeffs, (cons.key, cons.vars)), shape=shape - ) - else: - shape = self.model.shape - return scipy.sparse.csc_matrix( - (cons.coeffs, (cons.labels, cons.vars)), shape=shape - ) + label_index = self.model.variables.label_index + csrs = [] + con_labels_list = [] + for c in self.data.values(): + csr, con_labels = c.to_matrix(label_index) + csrs.append(csr) + con_labels_list.append(con_labels) + return scipy.sparse.vstack(csrs, format="csr"), np.concatenate(con_labels_list) def reset_dual(self) -> None: """ Reset the stored solution of variables. """ for k, c in self.items(): - if "dual" in c: - c._data = c.data.drop_vars("dual") + if isinstance(c, CSRConstraint): + if c._dual is not None: + self.data[k] = CSRConstraint( + c._csr, + c._con_labels, + c._rhs, + c._sign, + c._coords, + c._model, + c._name, + cindex=c._cindex, + dual=None, + ) + elif isinstance(c, Constraint): + if "dual" in c.data: + c._data = c.data.drop_vars("dual") + else: + msg = f"reset_dual encountered an unknown constraint type: {type(c)}" + raise NotImplementedError(msg) class AnonymousScalarConstraint: diff --git a/linopy/expressions.py b/linopy/expressions.py index d2ae9022..6a46834b 100644 --- a/linopy/expressions.py +++ b/linopy/expressions.py @@ -30,6 +30,7 @@ from xarray import Coordinates, DataArray, Dataset, IndexVariable from xarray.core.coordinates import DataArrayCoordinates, DatasetCoordinates from xarray.core.indexes import Indexes +from xarray.core.types import JoinOptions from xarray.core.utils import Frozen try: @@ -38,7 +39,7 @@ from xarray.computation.rolling import DatasetRolling except ImportError: import xarray.core.rolling - from xarray.core.rolling import DatasetRolling # type: 
ignore + from xarray.core.rolling import DatasetRolling # type: ignore[no-redef] from types import EllipsisType, NotImplementedType @@ -90,7 +91,10 @@ ) if TYPE_CHECKING: - from linopy.constraints import AnonymousScalarConstraint, Constraint + from linopy.constraints import ( + AnonymousScalarConstraint, + Constraint, + ) from linopy.model import Model from linopy.piecewise import PiecewiseConstraintDescriptor, PiecewiseExpression from linopy.variables import ScalarVariable, Variable @@ -366,6 +370,7 @@ def __init__(self, data: Dataset | Any | None, model: Model) -> None: f"data must be an instance of {supported_types}, got {type(data)}" ) + data = cast(Dataset, data) if not set(data).issuperset({"coeffs", "vars"}): raise ValueError( "data must contain the fields 'coeffs' and 'vars' or 'const'" @@ -387,6 +392,7 @@ def __init__(self, data: Dataset | Any | None, model: Model) -> None: data = assign_multiindex_safe(data, const=data.const.astype(float)) (data,) = xr.broadcast(data, exclude=HELPER_DIMS) + data = cast(Dataset, data) (coeffs_vars,) = xr.broadcast(data[["coeffs", "vars"]], exclude=[FACTOR_DIM]) coeffs_vars_dict = {str(k): v for k, v in coeffs_vars.items()} data = assign_multiindex_safe(data, **coeffs_vars_dict) @@ -403,7 +409,7 @@ def __init__(self, data: Dataset | Any | None, model: Model) -> None: raise ValueError("model must be an instance of linopy.Model") self._model = model - self._data = data + self._data = cast(Dataset, data) def __repr__(self) -> str: """ @@ -542,13 +548,13 @@ def _multiply_by_linear_expression( res = res + self.const * other.reset_const() if other.has_constant: res = res + self.reset_const() * other.const - return res + return cast(QuadraticExpression, res) def _align_constant( self: GenericExpression, other: DataArray, fill_value: float = 0, - join: str | None = None, + join: JoinOptions | None = None, ) -> tuple[DataArray, DataArray, bool]: """ Align a constant DataArray with self.const. 
@@ -586,12 +592,12 @@ def _align_constant( self.const, other, join=join, - fill_value=fill_value, # type: ignore[call-overload] + fill_value=fill_value, ) return self_const, aligned, True def _add_constant( - self: GenericExpression, other: ConstantLike, join: str | None = None + self: GenericExpression, other: ConstantLike, join: JoinOptions | None = None ) -> GenericExpression: # NaN values in self.const or other are filled with 0 (additive identity) # so that missing data does not silently propagate through arithmetic. @@ -617,7 +623,7 @@ def _apply_constant_op( other: ConstantLike, op: Callable[[DataArray, DataArray], DataArray], fill_value: float, - join: str | None = None, + join: JoinOptions | None = None, ) -> GenericExpression: """ Apply a constant operation (mul, div, etc.) to this expression with a scalar or array. @@ -645,12 +651,12 @@ def _apply_constant_op( return self.assign(coeffs=op(coeffs, factor), const=op(self_const, factor)) def _multiply_by_constant( - self: GenericExpression, other: ConstantLike, join: str | None = None + self: GenericExpression, other: ConstantLike, join: JoinOptions | None = None ) -> GenericExpression: return self._apply_constant_op(other, operator.mul, fill_value=0, join=join) def _divide_by_constant( - self: GenericExpression, other: ConstantLike, join: str | None = None + self: GenericExpression, other: ConstantLike, join: JoinOptions | None = None ) -> GenericExpression: return self._apply_constant_op(other, operator.truediv, fill_value=1, join=join) @@ -718,7 +724,7 @@ def __lt__(self, other: Any) -> NotImplementedType: def add( self: GenericExpression, other: SideLike, - join: str | None = None, + join: JoinOptions | None = None, ) -> GenericExpression | QuadraticExpression: """ Add an expression to others. 
@@ -746,7 +752,7 @@ def add( def sub( self: GenericExpression, other: SideLike, - join: str | None = None, + join: JoinOptions | None = None, ) -> GenericExpression | QuadraticExpression: """ Subtract others from expression. @@ -765,7 +771,7 @@ def sub( def mul( self: GenericExpression, other: SideLike, - join: str | None = None, + join: JoinOptions | None = None, ) -> GenericExpression | QuadraticExpression: """ Multiply the expr by a factor. @@ -790,7 +796,7 @@ def mul( def div( self: GenericExpression, other: VariableLike | ConstantLike, - join: str | None = None, + join: JoinOptions | None = None, ) -> GenericExpression | QuadraticExpression: """ Divide the expr by a factor. @@ -817,7 +823,7 @@ def div( def le( self: GenericExpression, rhs: SideLike, - join: str | None = None, + join: JoinOptions | None = None, ) -> Constraint: """ Less than or equal constraint. @@ -834,9 +840,9 @@ def le( return self.to_constraint(LESS_EQUAL, rhs, join=join) def ge( - self: GenericExpression, + self, rhs: SideLike, - join: str | None = None, + join: JoinOptions | None = None, ) -> Constraint: """ Greater than or equal constraint. @@ -853,9 +859,9 @@ def ge( return self.to_constraint(GREATER_EQUAL, rhs, join=join) def eq( - self: GenericExpression, + self, rhs: SideLike, - join: str | None = None, + join: JoinOptions | None = None, ) -> Constraint: """ Equality constraint. 
@@ -1006,7 +1012,7 @@ def _map_solution(self) -> DataArray: sol = pd.Series(m.matrices.sol, m.matrices.vlabels) sol[-1] = np.nan idx = np.ravel(self.vars) - values = sol[idx].to_numpy().reshape(self.vars.shape) + values = np.asarray(sol[idx]).reshape(self.vars.shape) return xr.DataArray(values, dims=self.vars.dims, coords=self.vars.coords) @property @@ -1111,7 +1117,7 @@ def cumsum( return self.rolling(dim=dim_dict).sum(keep_attrs=keep_attrs, skipna=skipna) def to_constraint( - self, sign: SignLike, rhs: SideLike, join: str | None = None + self, sign: SignLike, rhs: SideLike, join: JoinOptions | None = None ) -> Constraint: """ Convert a linear expression to a constraint. @@ -1725,7 +1731,7 @@ def flat(self) -> pd.DataFrame: """ ds = self.data - def mask_func(data: pd.DataFrame) -> pd.Series: + def mask_func(data: dict) -> pd.Series: mask = (data["vars"] != -1) & (data["coeffs"] != 0) return mask @@ -2208,7 +2214,7 @@ def solution(self) -> DataArray: return sol.rename("solution") def to_constraint( - self, sign: SignLike, rhs: SideLike, join: str | None = None + self, sign: SignLike, rhs: SideLike, join: JoinOptions | None = None ) -> NotImplementedType: raise NotImplementedError( "Quadratic expressions cannot be used in constraints." 
@@ -2224,7 +2230,7 @@ def flat(self) -> DataFrame: ).to_dataset(FACTOR_DIM) ds = self.data.drop_vars("vars").assign(vars) - def mask_func(data: pd.DataFrame) -> pd.Series: + def mask_func(data: dict) -> pd.Series: mask = ((data["vars1"] != -1) | (data["vars2"] != -1)) & ( data["coeffs"] != 0 ) @@ -2341,7 +2347,7 @@ def merge( ], dim: str = TERM_DIM, cls: type[GenericExpression] = None, # type: ignore - join: str | None = None, + join: JoinOptions | None = None, **kwargs: Any, ) -> GenericExpression: """ diff --git a/linopy/io.py b/linopy/io.py index 45740a2f..715d1a3b 100644 --- a/linopy/io.py +++ b/linopy/io.py @@ -1132,7 +1132,7 @@ def with_prefix(ds: xr.Dataset, prefix: str) -> xr.Dataset: with_prefix(var.data, f"variables-{name}") for name, var in m.variables.items() ] cons = [ - with_prefix(con.data, f"constraints-{name}") + with_prefix(con.to_netcdf_ds(), f"constraints-{name}") for name, con in m.constraints.items() ] objective = m.objective.data @@ -1170,14 +1170,15 @@ def read_netcdf(path: Path | str, **kwargs: Any) -> Model: ------- m : linopy.Model """ - from linopy.model import ( + from linopy.constraints import ( Constraint, + ConstraintBase, Constraints, - LinearExpression, - Model, - Variable, - Variables, + CSRConstraint, ) + from linopy.expressions import LinearExpression + from linopy.model import Model + from linopy.variables import Variable, Variables if isinstance(path, str): path = Path(path) @@ -1224,10 +1225,14 @@ def get_prefix(ds: xr.Dataset, prefix: str) -> xr.Dataset: cons = [str(k) for k in ds if str(k).startswith("constraints")] con_names = list({str(k).rsplit("-", 1)[0] for k in cons}) - constraints = {} + constraints: dict[str, ConstraintBase] = {} for k in sorted(con_names): name = remove_prefix(k, "constraints") - constraints[name] = Constraint(get_prefix(ds, k), m, name) + con_ds = get_prefix(ds, k) + if con_ds.attrs.get("_linopy_format") == "csr": + constraints[name] = CSRConstraint.from_netcdf_ds(con_ds, m, name) + else: + 
constraints[name] = Constraint(con_ds, m, name) m._constraints = Constraints(constraints, m) objective = get_prefix(ds, "objective") @@ -1283,15 +1288,10 @@ def copy(m: Model, include_solution: bool = False, deep: bool = True) -> Model: Model A deep or shallow copy of the model. """ - from linopy.model import ( - Constraint, - Constraints, - LinearExpression, - Model, - Objective, - Variable, - Variables, - ) + from linopy.constraints import Constraint, Constraints + from linopy.expressions import LinearExpression + from linopy.model import Model, Objective + from linopy.variables import Variable, Variables SOLVE_STATE_ATTRS = {"status", "termination_condition"} @@ -1319,9 +1319,9 @@ def copy(m: Model, include_solution: bool = False, deep: bool = True) -> Model: new_model._constraints = Constraints( { name: Constraint( - con.data.copy(deep=deep) + con.mutable().data.copy(deep=deep) if include_solution - else con.data[m.constraints.dataset_attrs].copy(deep=deep), + else con.mutable().data[m.constraints.dataset_attrs].copy(deep=deep), new_model, name, ) @@ -1332,7 +1332,11 @@ def copy(m: Model, include_solution: bool = False, deep: bool = True) -> Model: obj_expr = LinearExpression(m.objective.expression.data.copy(deep=deep), new_model) new_model._objective = Objective(obj_expr, new_model, m.objective.sense) - new_model._objective._value = m.objective.value if include_solution else None + new_model._objective._value = ( + float(m.objective.value) + if (include_solution and m.objective.value is not None) + else None + ) new_model._parameters = m._parameters.copy(deep=deep) new_model._blocks = m._blocks.copy(deep=deep) if m._blocks is not None else None diff --git a/linopy/matrices.py b/linopy/matrices.py index e1489e76..1fb59344 100644 --- a/linopy/matrices.py +++ b/linopy/matrices.py @@ -7,175 +7,175 @@ from __future__ import annotations -from functools import cached_property -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast import numpy as np 
-import pandas as pd +import scipy.sparse from numpy import ndarray -from pandas.core.indexes.base import Index -from pandas.core.series import Series -from scipy.sparse._csc import csc_matrix from linopy import expressions +from linopy.constraints import CSRConstraint if TYPE_CHECKING: from linopy.model import Model -def create_vector( - indices: Series | Index, - values: Series | ndarray, - fill_value: str | float | int = np.nan, - shape: int | None = None, -) -> ndarray: - """Create a vector of a size equal to the maximum index plus one.""" - if shape is None: - max_value = indices.max() - if not isinstance(max_value, np.integer | int): - raise ValueError("Indices must be integers.") - shape = max_value + 1 - vector = np.full(shape, fill_value) - vector[indices] = values - return vector - - class MatrixAccessor: """ Helper class to quickly access model related vectors and matrices. + + All arrays are compact — only active (non-masked) entries are included. + Position i in variable-side arrays corresponds to vlabels[i]. + Position i in constraint-side arrays corresponds to clabels[i]. 
""" def __init__(self, model: Model) -> None: self._parent = model + self._build_vars() + self._build_cons() - def clean_cached_properties(self) -> None: - """Clear the cache for all cached properties of an object""" - - for cached_prop in ["flat_vars", "flat_cons", "sol", "dual"]: - # check existence of cached_prop without creating it - if cached_prop in self.__dict__: - delattr(self, cached_prop) - - @cached_property - def flat_vars(self) -> pd.DataFrame: + def _build_vars(self) -> None: m = self._parent - return m.variables.flat + label_index = m.variables.label_index + self.vlabels: ndarray = label_index.vlabels - @cached_property - def flat_cons(self) -> pd.DataFrame: - m = self._parent - return m.constraints.flat + lb_list = [] + ub_list = [] + vtypes_list = [] - @property - def vlabels(self) -> ndarray: - """Vector of labels of all non-missing variables.""" - df: pd.DataFrame = self.flat_vars - return create_vector(df.key, df.labels, -1) + for name, var in m.variables.items(): + labels = var.labels.values.ravel() + mask = labels != -1 - @property - def vtypes(self) -> ndarray: - """Vector of types of all non-missing variables.""" - m = self._parent - df: pd.DataFrame = self.flat_vars - specs = [] - for name in m.variables: if name in m.binaries: - val = "B" + vtype = "B" elif name in m.integers: - val = "I" + vtype = "I" elif name in m.semi_continuous: - val = "S" + vtype = "S" else: - val = "C" - specs.append(pd.Series(val, index=m.variables[name].flat.labels)) - - ds = pd.concat(specs) - ds = df.set_index("key").labels.map(ds) - return create_vector(ds.index, ds.to_numpy(), fill_value="") - - @property - def lb(self) -> ndarray: - """Vector of lower bounds of all non-missing variables.""" - df: pd.DataFrame = self.flat_vars - return create_vector(df.key, df.lower) - - @cached_property - def sol(self) -> ndarray: - """Vector of solution values of all non-missing variables.""" - if not self._parent.status == "ok": - raise ValueError("Model is not 
optimized.") - if "solution" not in self.flat_vars: - del self.flat_vars # clear cache - df: pd.DataFrame = self.flat_vars - return create_vector(df.key, df.solution, fill_value=np.nan) - - @cached_property - def dual(self) -> ndarray: - """Vector of dual values of all non-missing constraints.""" - if not self._parent.status == "ok": - raise ValueError("Model is not optimized.") - if "dual" not in self.flat_cons: - del self.flat_cons # clear cache - df: pd.DataFrame = self.flat_cons - if "dual" not in df: - raise AttributeError( - "Underlying is optimized but does not have dual values stored." - ) - return create_vector(df.key, df.dual, fill_value=np.nan) - - @property - def ub(self) -> ndarray: - """Vector of upper bounds of all non-missing variables.""" - df: pd.DataFrame = self.flat_vars - return create_vector(df.key, df.upper) - - @property - def clabels(self) -> ndarray: - """Vector of labels of all non-missing constraints.""" - df: pd.DataFrame = self.flat_cons - if df.empty: - return np.array([], dtype=int) - return create_vector(df.key, df.labels, fill_value=-1) - - @property - def A(self) -> csc_matrix | None: - """Constraint matrix of all non-missing constraints and variables.""" + vtype = "C" + + lb_list.append(var.lower.values.ravel()[mask]) + ub_list.append(var.upper.values.ravel()[mask]) + vtypes_list.append(np.full(mask.sum(), vtype)) + + if lb_list: + self.lb: ndarray = np.concatenate(lb_list) + self.ub: ndarray = np.concatenate(ub_list) + self.vtypes: ndarray = np.concatenate(vtypes_list) + else: + self.lb = np.array([]) + self.ub = np.array([]) + self.vtypes = np.array([], dtype=object) + + def _build_cons(self) -> None: m = self._parent - if not len(m.constraints): - return None - A: csc_matrix = m.constraints.to_matrix(filter_missings=False) - return A[self.clabels][:, self.vlabels] - - @property - def sense(self) -> ndarray: - """Vector of senses of all non-missing constraints.""" - df: pd.DataFrame = self.flat_cons - return 
create_vector(df.key, df.sign.astype(np.dtype(" ndarray: - """Vector of right-hand-sides of all non-missing constraints.""" - df: pd.DataFrame = self.flat_cons - return create_vector(df.key, df.rhs) + if not len(m.constraints): + self.clabels: ndarray = np.array([], dtype=np.intp) + self.b: ndarray = np.array([]) + self.sense: ndarray = np.array([], dtype=object) + self.A: scipy.sparse.csr_array | None = None + return + + label_index = m.variables.label_index + csrs = [] + clabels_list = [] + b_list = [] + sense_list = [] + for c in m.constraints.data.values(): + csr, con_labels, b, sense = c.to_matrix_with_rhs(label_index) + csrs.append(csr) + clabels_list.append(con_labels) + b_list.append(b) + sense_list.append(sense) + + self.A = cast(scipy.sparse.csr_array, scipy.sparse.vstack(csrs, format="csr")) + self.clabels = np.concatenate(clabels_list) + self.b = np.concatenate(b_list) if b_list else np.array([]) + self.sense = ( + np.concatenate(sense_list) if sense_list else np.array([], dtype=object) + ) @property def c(self) -> ndarray: - """Vector of objective coefficients of all non-missing variables.""" + """Objective coefficients aligned with vlabels.""" m = self._parent - ds = m.objective.flat - if isinstance(m.objective.expression, expressions.QuadraticExpression): - ds = ds[(ds.vars1 == -1) | (ds.vars2 == -1)] - ds["vars"] = ds.vars1.where(ds.vars1 != -1, ds.vars2) + result = np.zeros(len(self.vlabels)) - vars: pd.Series = ds.vars.map(self.flat_vars.set_index("labels").key) - shape: int = self.flat_vars.key.max() + 1 - return create_vector(vars, ds.coeffs, fill_value=0.0, shape=shape) + label_index = m.variables.label_index + label_to_pos = label_index.label_to_pos + expr = m.objective.expression + if isinstance(expr, expressions.QuadraticExpression): + # vars has shape (_factor=2, _term); linear terms have one factor == -1 + vars_2d = expr.data.vars.values # shape (2, n_term) + coeffs_all = expr.data.coeffs.values.ravel() + vars1, vars2 = vars_2d[0], 
vars_2d[1] + linear = (vars1 == -1) | (vars2 == -1) + var_labels = np.where(vars1[linear] != -1, vars1[linear], vars2[linear]) + coeffs = coeffs_all[linear] + else: + var_labels = expr.data.vars.values.ravel() + coeffs = expr.data.coeffs.values.ravel() + + mask = var_labels != -1 + np.add.at(result, label_to_pos[var_labels[mask]], coeffs[mask]) + return result @property - def Q(self) -> csc_matrix | None: - """Matrix objective coefficients of quadratic terms of all non-missing variables.""" + def Q(self) -> scipy.sparse.csc_matrix | None: + """Quadratic objective matrix, shape (n_active_vars, n_active_vars).""" m = self._parent expr = m.objective.expression if not isinstance(expr, expressions.QuadraticExpression): return None return expr.to_matrix()[self.vlabels][:, self.vlabels] + + @property + def sol(self) -> ndarray: + """Solution values aligned with vlabels.""" + if not self._parent.status == "ok": + raise ValueError("Model is not optimized.") + m = self._parent + result = np.full(len(self.vlabels), np.nan) + label_index = m.variables.label_index + label_to_pos = label_index.label_to_pos + for _, var in m.variables.items(): + labels = var.labels.values.ravel() + mask = labels != -1 + positions = label_to_pos[labels[mask]] + result[positions] = var.solution.values.ravel()[mask] + return result + + @property + def dual(self) -> ndarray: + """Dual values aligned with clabels.""" + if not self._parent.status == "ok": + raise ValueError("Model is not optimized.") + m = self._parent + label_index = m.variables.label_index + dual_list = [] + has_dual = False + for c in m.constraints.data.values(): + if isinstance(c, CSRConstraint): + # _dual is active-only + if c._dual is not None: + dual_list.append(c._dual) + has_dual = True + else: + dual_list.append(np.full(len(c._con_labels), np.nan)) + else: + csr, _ = c.to_matrix(label_index) + nonempty = np.diff(csr.indptr).astype(bool) + active_rows = np.flatnonzero(nonempty) + if "dual" in c.data: + 
dual_list.append(c.dual.values.ravel()[active_rows]) + has_dual = True + else: + dual_list.append(np.full(len(active_rows), np.nan)) + if not has_dual: + raise AttributeError( + "Underlying is optimized but does not have dual values stored." + ) + return np.concatenate(dual_list) if dual_list else np.array([]) diff --git a/linopy/model.py b/linopy/model.py index c98d104b..040c6762 100644 --- a/linopy/model.py +++ b/linopy/model.py @@ -9,6 +9,7 @@ import logging import os import re +import warnings from collections.abc import Callable, Mapping, Sequence from pathlib import Path from tempfile import NamedTemporaryFile, gettempdir @@ -46,7 +47,13 @@ ModelStatus, TerminationCondition, ) -from linopy.constraints import AnonymousScalarConstraint, Constraint, Constraints +from linopy.constraints import ( + AnonymousScalarConstraint, + Constraint, + ConstraintBase, + Constraints, + CSRConstraint, +) from linopy.expressions import ( LinearExpression, QuadraticExpression, @@ -132,8 +139,6 @@ class Model: _chunk: T_Chunks _force_dim_names: bool _solver_dir: Path - matrices: MatrixAccessor - __slots__ = ( # containers "_variables", @@ -160,7 +165,6 @@ class Model: "_relaxed_registry", "solver_model", "solver_name", - "matrices", ) def __init__( @@ -219,7 +223,10 @@ def __init__( gettempdir() if solver_dir is None else solver_dir ) - self.matrices: MatrixAccessor = MatrixAccessor(self) + @property + def matrices(self) -> MatrixAccessor: + """Matrix representation of the model, computed fresh on each access.""" + return MatrixAccessor(self) @property def variables(self) -> Variables: @@ -719,6 +726,38 @@ def add_sos_constraints( add_piecewise_constraints = add_piecewise_constraints + @overload + def add_constraints( + self, + lhs: VariableLike + | ExpressionLike + | ConstraintLike + | Sequence[tuple[ConstantLike, VariableLike | str]] + | Callable, + sign: SignLike | None = ..., + rhs: ConstantLike | VariableLike | ExpressionLike | None = ..., + name: str | None = ..., + coords: 
Sequence[Sequence | pd.Index | DataArray] | Mapping | None = ..., + mask: MaskLike | None = ..., + freeze: Literal[True] = ..., + ) -> CSRConstraint: ... + + @overload + def add_constraints( + self, + lhs: VariableLike + | ExpressionLike + | ConstraintLike + | Sequence[tuple[ConstantLike, VariableLike | str]] + | Callable, + sign: SignLike | None = ..., + rhs: ConstantLike | VariableLike | ExpressionLike | None = ..., + name: str | None = ..., + coords: Sequence[Sequence | pd.Index | DataArray] | Mapping | None = ..., + mask: MaskLike | None = ..., + freeze: Literal[False] = ..., + ) -> Constraint: ... + def add_constraints( self, lhs: VariableLike @@ -731,7 +770,8 @@ def add_constraints( name: str | None = None, coords: Sequence[Sequence | pd.Index | DataArray] | Mapping | None = None, mask: MaskLike | None = None, - ) -> Constraint: + freeze: bool = True, + ) -> ConstraintBase: """ Assign a new, possibly multi-dimensional array of constraints to the model. @@ -743,7 +783,7 @@ def add_constraints( Parameters ---------- - lhs : linopy.LinearExpression/linopy.Constraint/callable + lhs : linopy.LinearExpression/linopy.ConstraintBase/callable Left hand side of the constraint(s) or optionally full constraint. In case a linear expression is passed, `sign` and `rhs` must not be None. @@ -765,12 +805,14 @@ def add_constraints( Boolean mask with False values for constraints which are skipped. The shape of the mask has to match the shape the added constraints. Default is None. - + freeze : bool, optional + If True, convert the constraint to an immutable CSR-backed Constraint + before returning. Default is True. Returns ------- - labels : linopy.model.Constraint - Array containing the labels of the added constraints. + constraint : linopy.ConstraintBase + The added constraint (MutableConstraint by default, or Constraint if freeze=True). """ msg_sign_rhs_none = f"Arguments `sign` and `rhs` cannot be None when passing along with a {type(lhs)}." 
@@ -811,7 +853,7 @@ def add_constraints( if sign is not None or rhs is not None: raise ValueError(msg_sign_rhs_none) data = lhs.to_constraint().data - elif isinstance(lhs, Constraint): + elif isinstance(lhs, ConstraintBase): if sign is not None or rhs is not None: raise ValueError(msg_sign_rhs_none) data = lhs.data @@ -889,8 +931,7 @@ def add_constraints( data = data.chunk(self.chunk) constraint = Constraint(data, name=name, model=self, skip_broadcast=True) - self.constraints.add(constraint) - return constraint + return self.constraints.add(constraint, freeze=freeze and not self.chunk) def add_objective( self, @@ -944,6 +985,8 @@ def remove_variables(self, name: str) -> None: """ from linopy.constants import FIX_CONSTRAINT_PREFIX + variable = self.variables[name] + # Clean up fix constraint if present fix_name = f"{FIX_CONSTRAINT_PREFIX}{name}" if fix_name in self.constraints: @@ -952,18 +995,24 @@ def remove_variables(self, name: str) -> None: # Clean up relaxed registry if present self._relaxed_registry.pop(name, None) - labels = self.variables[name].labels - self.variables.remove(name) + to_remove = [ + k for k, con in self.constraints.items() if con.has_variable(variable) + ] - for k in list(self.constraints): - vars = self.constraints[k].data["vars"] - vars = vars.where(~vars.isin(labels), -1) - self.constraints[k]._data = assign_multiindex_safe( - self.constraints[k].data, vars=vars + if to_remove: + warnings.warn( + f"Removing variable '{name}' also removes constraints {to_remove} " + "because they reference this variable.", + UserWarning, + stacklevel=2, ) + for k in to_remove: + self.constraints.remove(k) + + self.variables.remove(name) self.objective = self.objective.sel( - {TERM_DIM: ~self.objective.vars.isin(labels)} + {TERM_DIM: ~self.objective.vars.isin(variable.labels)} ) def remove_constraints(self, name: str | list[str]) -> None: @@ -1310,7 +1359,7 @@ def solve( sanitize_zeros: bool = True, sanitize_infinities: bool = True, slice_size: int = 
2_000_000, - remote: RemoteHandler | OetcHandler = None, # type: ignore + remote: RemoteHandler | OetcHandler | None = None, progress: bool | None = None, mock_solve: bool = False, reformulate_sos: bool | Literal["auto"] = False, @@ -1405,9 +1454,6 @@ def solve( sanitize_zeros=sanitize_zeros, sanitize_infinities=sanitize_infinities ) - # clear cached matrix properties potentially present from previous solve commands - self.matrices.clean_cached_properties() - # check io_api if io_api is not None and io_api not in IO_APIS: raise ValueError( @@ -1623,9 +1669,6 @@ def _mock_solve( ) -> tuple[str, str]: solver_name = "mock" - # clear cached matrix properties potentially present from previous solve commands - self.matrices.clean_cached_properties() - logger.info(f" Solve problem using {solver_name.title()} solver") # reset result self.reset_solution() diff --git a/linopy/piecewise.py b/linopy/piecewise.py index 78f7be65..deb0ef3c 100644 --- a/linopy/piecewise.py +++ b/linopy/piecewise.py @@ -40,7 +40,7 @@ ) if TYPE_CHECKING: - from linopy.constraints import Constraint + from linopy.constraints import ConstraintBase from linopy.expressions import LinearExpression from linopy.model import Model from linopy.types import LinExprLike @@ -686,7 +686,7 @@ def _add_pwl_lp( sign: str, x_points: DataArray, y_points: DataArray, -) -> Constraint: +) -> ConstraintBase: """Add pure LP tangent-line constraints.""" dx = x_points.diff(BREAKPOINT_DIM) dy = y_points.diff(BREAKPOINT_DIM) @@ -729,7 +729,7 @@ def _add_pwl_sos2_core( y_points: DataArray, lambda_mask: DataArray | None, active: LinearExpression | None = None, -) -> Constraint: +) -> ConstraintBase: """ Core SOS2 formulation linking x_expr and target_expr via breakpoints. @@ -780,7 +780,7 @@ def _add_pwl_incremental_core( y_points: DataArray, bp_mask: DataArray | None, active: LinearExpression | None = None, -) -> Constraint: +) -> ConstraintBase: """ Core incremental formulation linking x_expr and target_expr. 
@@ -846,7 +846,7 @@ def _add_pwl_incremental_core( model.add_constraints(delta_var <= binary_var, name=inc_link_name) # Order constraints: y_{i+1} ≤ δ_i for i = 0..n-2 - fill_con: Constraint | None = None + fill_con: ConstraintBase | None = None if n_segments >= 2: delta_lo = delta_var.isel({LP_SEG_DIM: slice(None, -1)}, drop=True) delta_hi = delta_var.isel({LP_SEG_DIM: slice(1, None)}, drop=True) @@ -884,7 +884,7 @@ def _add_dpwl_sos2_core( y_points: DataArray, lambda_mask: DataArray | None, active: LinearExpression | None = None, -) -> Constraint: +) -> ConstraintBase: """ Core disjunctive SOS2 formulation with separate x/y points. @@ -948,11 +948,11 @@ def _add_dpwl_sos2_core( def add_piecewise_constraints( model: Model, - descriptor: PiecewiseConstraintDescriptor | Constraint, + descriptor: PiecewiseConstraintDescriptor | ConstraintBase, method: Literal["sos2", "incremental", "auto", "lp"] = "auto", name: str | None = None, skip_nan_check: bool = False, -) -> Constraint: +) -> ConstraintBase: """ Add a piecewise linear constraint from a :class:`PiecewiseConstraintDescriptor`. 
@@ -975,7 +975,7 @@ def add_piecewise_constraints( Returns ------- - Constraint + ConstraintBase """ if not isinstance(descriptor, PiecewiseConstraintDescriptor): raise TypeError( @@ -1064,7 +1064,7 @@ def _add_continuous( method: str, skip_nan_check: bool, active: LinearExpression | None = None, -) -> Constraint: +) -> ConstraintBase: """Handle continuous (non-disjunctive) piecewise constraints.""" convexity: Literal["convex", "concave", "linear", "mixed"] | None = None @@ -1182,7 +1182,7 @@ def _add_disjunctive( mask: DataArray | None, method: str, active: LinearExpression | None = None, -) -> Constraint: +) -> ConstraintBase: """Handle disjunctive piecewise constraints.""" if method == "lp": raise ValueError("Pure LP method is not supported for disjunctive constraints") diff --git a/linopy/solvers.py b/linopy/solvers.py index 10731547..8309113b 100644 --- a/linopy/solvers.py +++ b/linopy/solvers.py @@ -54,7 +54,7 @@ FILE_IO_APIS = ["lp", "lp-polars", "mps"] IO_APIS = FILE_IO_APIS + ["direct"] -available_solvers = [] +available_solvers: list[str] = [] which = "where" if os.name == "nt" else "which" @@ -1721,20 +1721,23 @@ def get_solver_solution() -> Solution: sol = pd.Series(m.getSolution(), index=var, dtype=float) try: - try: # Try new API first - _dual = m.getDuals() - except AttributeError: # Fallback to old API - _dual = m.getDual() - - try: # Try new API first - constraints = m.getNameList( - xpress_Namespaces.ROW, 0, m.attributes.rows - 1 - ) - except AttributeError: # Fallback to old API - constraints = m.getnamelist( - xpress_Namespaces.ROW, 0, m.attributes.rows - 1 - ) - dual = pd.Series(_dual, index=constraints, dtype=float) + if m.attributes.rows == 0: + dual = pd.Series(dtype=float) + else: + try: # Try new API first + _dual = m.getDuals() + except AttributeError: # Fallback to old API + _dual = m.getDual() + + try: # Try new API first + constraints = m.getNameList( + xpress_Namespaces.ROW, 0, m.attributes.rows - 1 + ) + except AttributeError: # 
Fallback to old API + constraints = m.getnamelist( + xpress_Namespaces.ROW, 0, m.attributes.rows - 1 + ) + dual = pd.Series(_dual, index=constraints, dtype=float) except (xpress.SolverError, xpress.ModelError, SystemError): logger.warning("Dual values of MILP couldn't be parsed") dual = pd.Series(dtype=float) diff --git a/linopy/testing.py b/linopy/testing.py index 0392064e..e6a58a0d 100644 --- a/linopy/testing.py +++ b/linopy/testing.py @@ -2,7 +2,7 @@ from xarray.testing import assert_equal -from linopy.constraints import Constraint, _con_unwrap +from linopy.constraints import ConstraintBase, _con_unwrap from linopy.expressions import LinearExpression, QuadraticExpression, _expr_unwrap from linopy.model import Model from linopy.variables import Variable, _var_unwrap @@ -29,7 +29,7 @@ def assert_quadequal( return assert_equal(_expr_unwrap(a), _expr_unwrap(b)) -def assert_conequal(a: Constraint, b: Constraint, strict: bool = True) -> None: +def assert_conequal(a: ConstraintBase, b: ConstraintBase, strict: bool = True) -> None: """ Assert that two constraints are equal. 
diff --git a/linopy/types.py b/linopy/types.py index 7238c552..34f7a5a9 100644 --- a/linopy/types.py +++ b/linopy/types.py @@ -2,7 +2,7 @@ from collections.abc import Hashable, Iterable, Mapping, Sequence from pathlib import Path -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING, TypeAlias, Union import numpy import polars as pl @@ -11,7 +11,10 @@ from xarray.core.coordinates import DataArrayCoordinates, DatasetCoordinates if TYPE_CHECKING: - from linopy.constraints import AnonymousScalarConstraint, Constraint + from linopy.constraints import ( + AnonymousScalarConstraint, + ConstraintBase, + ) from linopy.expressions import ( LinearExpression, QuadraticExpression, @@ -20,37 +23,43 @@ from linopy.piecewise import PiecewiseConstraintDescriptor from linopy.variables import ScalarVariable, Variable -# Type aliases using Union for Python 3.9 compatibility -CoordsLike = Union[ # noqa: UP007 - Sequence[Sequence | Index | DataArray], - Mapping, - DataArrayCoordinates, - DatasetCoordinates, -] -DimsLike = Union[str, Iterable[Hashable]] # noqa: UP007 +CoordsLike: TypeAlias = ( + Sequence[Sequence | Index | DataArray] + | Mapping + | DataArrayCoordinates + | DatasetCoordinates +) +DimsLike: TypeAlias = str | Iterable[Hashable] + +ConstantLike: TypeAlias = ( + int + | float + | numpy.floating + | numpy.integer + | numpy.ndarray + | DataArray + | Series + | DataFrame + | pl.Series +) +SignLike: TypeAlias = str | numpy.ndarray | DataArray | Series | DataFrame +MaskLike: TypeAlias = numpy.ndarray | DataArray | Series | DataFrame +PathLike: TypeAlias = str | Path -ConstantLike = Union[ # noqa: UP007 - int, - float, - numpy.floating, - numpy.integer, - numpy.ndarray, - DataArray, - Series, - DataFrame, - pl.Series, +# These reference types only available under TYPE_CHECKING, so use Union with strings +VariableLike: TypeAlias = Union["ScalarVariable", "Variable"] +ExpressionLike: TypeAlias = Union[ + "ScalarLinearExpression", "LinearExpression", 
"QuadraticExpression" ] -SignLike = Union[str, numpy.ndarray, DataArray, Series, DataFrame] # noqa: UP007 -VariableLike = Union["ScalarVariable", "Variable"] -ExpressionLike = Union[ +ConstraintLike: TypeAlias = Union[ + "ConstraintBase", "AnonymousScalarConstraint", "PiecewiseConstraintDescriptor" +] +LinExprLike: TypeAlias = Union["Variable", "LinearExpression"] +SideLike: TypeAlias = Union[ + ConstantLike, + "ScalarVariable", + "Variable", "ScalarLinearExpression", "LinearExpression", "QuadraticExpression", ] -ConstraintLike = Union[ - "Constraint", "AnonymousScalarConstraint", "PiecewiseConstraintDescriptor" -] -LinExprLike = Union["Variable", "LinearExpression"] -MaskLike = Union[numpy.ndarray, DataArray, Series, DataFrame] # noqa: UP007 -SideLike = Union[ConstantLike, VariableLike, ExpressionLike] # noqa: UP007 -PathLike = Union[str, Path] # noqa: UP007 diff --git a/linopy/variables.py b/linopy/variables.py index 2d17fef8..6cdaa83f 100644 --- a/linopy/variables.py +++ b/linopy/variables.py @@ -27,12 +27,14 @@ from xarray import DataArray, Dataset, broadcast from xarray.core.coordinates import DatasetCoordinates from xarray.core.indexes import Indexes +from xarray.core.types import JoinOptions from xarray.core.utils import Frozen import linopy.expressions as expressions from linopy.common import ( LabelPositionIndex, LocIndexer, + VariableLabelIndex, as_dataarray, assign_multiindex_safe, check_has_nulls, @@ -578,7 +580,7 @@ def __contains__(self, value: str) -> bool: return self.data.__contains__(value) def add( - self, other: SideLike, join: str | None = None + self, other: SideLike, join: JoinOptions | None = None ) -> LinearExpression | QuadraticExpression: """ Add variables to linear expressions or other variables. 
@@ -595,7 +597,7 @@ def add( return self.to_linexpr().add(other, join=join) def sub( - self, other: SideLike, join: str | None = None + self, other: SideLike, join: JoinOptions | None = None ) -> LinearExpression | QuadraticExpression: """ Subtract linear expressions or other variables from the variables. @@ -612,7 +614,7 @@ def sub( return self.to_linexpr().sub(other, join=join) def mul( - self, other: ConstantLike, join: str | None = None + self, other: ConstantLike, join: JoinOptions | None = None ) -> LinearExpression | QuadraticExpression: """ Multiply variables with a coefficient. @@ -629,7 +631,7 @@ def mul( return self.to_linexpr().mul(other, join=join) def div( - self, other: ConstantLike, join: str | None = None + self, other: ConstantLike, join: JoinOptions | None = None ) -> LinearExpression | QuadraticExpression: """ Divide variables with a coefficient. @@ -645,7 +647,7 @@ def div( """ return self.to_linexpr().div(other, join=join) - def le(self, rhs: SideLike, join: str | None = None) -> Constraint: + def le(self, rhs: SideLike, join: JoinOptions | None = None) -> Constraint: """ Less than or equal constraint. @@ -660,7 +662,7 @@ def le(self, rhs: SideLike, join: str | None = None) -> Constraint: """ return self.to_linexpr().le(rhs, join=join) - def ge(self, rhs: SideLike, join: str | None = None) -> Constraint: + def ge(self, rhs: SideLike, join: JoinOptions | None = None) -> Constraint: """ Greater than or equal constraint. @@ -675,7 +677,7 @@ def ge(self, rhs: SideLike, join: str | None = None) -> Constraint: """ return self.to_linexpr().ge(rhs, join=join) - def eq(self, rhs: SideLike, join: str | None = None) -> Constraint: + def eq(self, rhs: SideLike, join: JoinOptions | None = None) -> Constraint: """ Equality constraint. 
@@ -1419,6 +1421,7 @@ class Variables: data: dict[str, Variable] model: Model _label_position_index: LabelPositionIndex | None = None + _variable_label_index: VariableLabelIndex | None = None dataset_attrs = ["labels", "lower", "upper"] dataset_names = ["Labels", "Lower bounds", "Upper bounds"] @@ -1528,6 +1531,15 @@ def _invalidate_label_position_index(self) -> None: """Invalidate the label position index cache.""" if self._label_position_index is not None: self._label_position_index.invalidate() + if self._variable_label_index is not None: + self._variable_label_index.invalidate() + + @property + def label_index(self) -> VariableLabelIndex: + """Index for O(1) label->position mapping and compact vlabels array.""" + if self._variable_label_index is None: + self._variable_label_index = VariableLabelIndex(self) + return self._variable_label_index @property def attrs(self) -> dict[Any, Any]: diff --git a/test/test_constraint.py b/test/test_constraint.py index bfd29a6e..e1b9f4b6 100644 --- a/test/test_constraint.py +++ b/test/test_constraint.py @@ -24,7 +24,12 @@ short_LESS_EQUAL, sign_replace_dict, ) -from linopy.constraints import AnonymousScalarConstraint, Constraint, Constraints +from linopy.constraints import ( + AnonymousScalarConstraint, + Constraint, + ConstraintBase, + Constraints, +) @pytest.fixture @@ -48,19 +53,39 @@ def y(m: Model) -> linopy.Variable: @pytest.fixture -def c(m: Model) -> linopy.constraints.Constraint: +def c(m: Model) -> linopy.constraints.ConstraintBase: return m.constraints["c"] -def test_constraint_repr(c: linopy.constraints.Constraint) -> None: +@pytest.fixture +def mc(m: Model) -> linopy.constraints.MutableConstraint: + return m.constraints["c"].mutable() + + +def test_constraint_repr(c: linopy.constraints.CSRConstraint) -> None: c.__repr__() +def test_constraint_repr_equivalent_to_mutable( + c: linopy.constraints.CSRConstraint, +) -> None: + """Constraint (CSR-backed) and MutableConstraint repr must be identical.""" + frozen = 
c.freeze() + assert repr(frozen) == repr(c) + + def test_constraints_repr(m: Model) -> None: m.constraints.__repr__() -def test_constraint_name(c: linopy.constraints.Constraint) -> None: +def test_add_constraints_freeze(m: Model, x: linopy.Variable) -> None: + c = m.add_constraints(x >= 1, name="frozen_c", freeze=True) + assert isinstance(c, linopy.constraints.CSRConstraint) + assert isinstance(m.constraints["frozen_c"], linopy.constraints.CSRConstraint) + assert c.ncons == 10 + + +def test_constraint_name(c: linopy.constraints.CSRConstraint) -> None: assert c.name == "c" @@ -75,7 +100,7 @@ def test_cannot_create_constraint_without_variable() -> None: _ = linopy.LinearExpression(12, model) == linopy.LinearExpression(13, model) -def test_constraints_getter(m: Model, c: linopy.constraints.Constraint) -> None: +def test_constraints_getter(m: Model, c: linopy.constraints.CSRConstraint) -> None: assert c.shape == (10,) assert isinstance(m.constraints[["c"]], Constraints) @@ -228,7 +253,7 @@ def test_constraint_wrapped_methods(x: linopy.Variable, y: linopy.Variable) -> N def test_anonymous_constraint_sel(x: linopy.Variable, y: linopy.Variable) -> None: expr = 10 * x + y con = expr <= 10 - assert isinstance(con.sel(first=[1, 2]), Constraint) + assert isinstance(con.sel(first=[1, 2]), ConstraintBase) def test_anonymous_constraint_swap_dims(x: linopy.Variable, y: linopy.Variable) -> None: @@ -236,7 +261,7 @@ def test_anonymous_constraint_swap_dims(x: linopy.Variable, y: linopy.Variable) con = expr <= 10 con = con.assign_coords({"third": ("second", con.indexes["second"] + 100)}) con = con.swap_dims({"second": "third"}) - assert isinstance(con, Constraint) + assert isinstance(con, ConstraintBase) assert con.coord_dims == ("first", "third") @@ -245,7 +270,7 @@ def test_anonymous_constraint_set_index(x: linopy.Variable, y: linopy.Variable) con = expr <= 10 con = con.assign_coords({"third": ("second", con.indexes["second"] + 100)}) con = con.set_index({"multi": ["second", 
"third"]}) - assert isinstance(con, Constraint) + assert isinstance(con, ConstraintBase) assert con.coord_dims == ( "first", "multi", @@ -256,13 +281,13 @@ def test_anonymous_constraint_set_index(x: linopy.Variable, y: linopy.Variable) def test_anonymous_constraint_loc(x: linopy.Variable, y: linopy.Variable) -> None: expr = 10 * x + y con = expr <= 10 - assert isinstance(con.loc[[1, 2]], Constraint) + assert isinstance(con.loc[[1, 2]], ConstraintBase) def test_anonymous_constraint_getitem(x: linopy.Variable, y: linopy.Variable) -> None: expr = 10 * x + y con = expr <= 10 - assert isinstance(con[1], Constraint) + assert isinstance(con[1], ConstraintBase) def test_constraint_from_rule(m: Model, x: linopy.Variable, y: linopy.Variable) -> None: @@ -271,7 +296,7 @@ def bound(m: Model, i: int, j: int) -> AnonymousScalarConstraint: coords = [x.coords["first"], y.coords["second"]] con = Constraint.from_rule(m, bound, coords) - assert isinstance(con, Constraint) + assert isinstance(con, ConstraintBase) assert con.lhs.nterm == 2 repr(con) # test repr @@ -286,7 +311,7 @@ def bound(m: Model, i: int, j: int) -> AnonymousScalarConstraint | None: coords = [x.coords["first"], y.coords["second"]] con = Constraint.from_rule(m, bound, coords) - assert isinstance(con, Constraint) + assert isinstance(con, ConstraintBase) assert isinstance(con.lhs.vars, xr.DataArray) assert con.lhs.nterm == 2 assert (con.lhs.vars.loc[0, :] == -1).all() @@ -295,153 +320,158 @@ def bound(m: Model, i: int, j: int) -> AnonymousScalarConstraint | None: def test_constraint_vars_getter( - c: linopy.constraints.Constraint, x: linopy.Variable + mc: linopy.constraints.MutableConstraint, x: linopy.Variable ) -> None: - assert_equal(c.vars.squeeze(), x.labels) + assert_equal(mc.vars.squeeze(), x.labels) -def test_constraint_coeffs_getter(c: linopy.constraints.Constraint) -> None: - assert (c.coeffs == 1).all() +def test_constraint_coeffs_getter(mc: linopy.constraints.MutableConstraint) -> None: + assert (mc.coeffs 
== 1).all() -def test_constraint_sign_getter(c: linopy.constraints.Constraint) -> None: +def test_constraint_sign_getter(c: linopy.constraints.CSRConstraint) -> None: assert (c.sign == GREATER_EQUAL).all() -def test_constraint_rhs_getter(c: linopy.constraints.Constraint) -> None: +def test_constraint_rhs_getter(c: linopy.constraints.CSRConstraint) -> None: assert (c.rhs == 0).all() def test_constraint_vars_setter( - c: linopy.constraints.Constraint, x: linopy.Variable + mc: linopy.constraints.MutableConstraint, x: linopy.Variable ) -> None: - c.vars = x - assert_equal(c.vars, x.labels) + mc.vars = x + assert_equal(mc.vars, x.labels) def test_constraint_vars_setter_with_array( - c: linopy.constraints.Constraint, x: linopy.Variable + mc: linopy.constraints.MutableConstraint, x: linopy.Variable ) -> None: - c.vars = x.labels - assert_equal(c.vars, x.labels) + mc.vars = x.labels + assert_equal(mc.vars, x.labels) def test_constraint_vars_setter_invalid( - c: linopy.constraints.Constraint, x: linopy.Variable + mc: linopy.constraints.MutableConstraint, x: linopy.Variable ) -> None: with pytest.raises(TypeError): - c.vars = pd.DataFrame(x.labels) + mc.vars = pd.DataFrame(x.labels) -def test_constraint_coeffs_setter(c: linopy.constraints.Constraint) -> None: - c.coeffs = 3 - assert (c.coeffs == 3).all() +def test_constraint_coeffs_setter(mc: linopy.constraints.MutableConstraint) -> None: + mc.coeffs = 3 + assert (mc.coeffs == 3).all() def test_constraint_lhs_setter( - c: linopy.constraints.Constraint, x: linopy.Variable, y: linopy.Variable + mc: linopy.constraints.MutableConstraint, x: linopy.Variable, y: linopy.Variable ) -> None: - c.lhs = x + y - assert c.lhs.nterm == 2 - assert c.vars.notnull().all().item() - assert c.coeffs.notnull().all().item() + mc.lhs = x + y + assert mc.lhs.nterm == 2 + assert mc.vars.notnull().all().item() + assert mc.coeffs.notnull().all().item() def test_constraint_lhs_setter_with_variable( - c: linopy.constraints.Constraint, x: linopy.Variable 
+ mc: linopy.constraints.MutableConstraint, x: linopy.Variable ) -> None: - c.lhs = x - assert c.lhs.nterm == 1 + mc.lhs = x + assert mc.lhs.nterm == 1 -def test_constraint_lhs_setter_with_constant(c: linopy.constraints.Constraint) -> None: - sizes = c.sizes - c.lhs = 10 - assert (c.rhs == -10).all() - assert c.lhs.nterm == 0 - assert c.sizes["first"] == sizes["first"] +def test_constraint_lhs_setter_with_constant( + mc: linopy.constraints.MutableConstraint, +) -> None: + sizes = mc.sizes + mc.lhs = 10 + assert (mc.rhs == -10).all() + assert mc.lhs.nterm == 0 + assert mc.sizes["first"] == sizes["first"] -def test_constraint_sign_setter(c: linopy.constraints.Constraint) -> None: - c.sign = EQUAL - assert (c.sign == EQUAL).all() +def test_constraint_sign_setter(mc: linopy.constraints.MutableConstraint) -> None: + mc.sign = EQUAL + assert (mc.sign == EQUAL).all() -def test_constraint_sign_setter_alternative(c: linopy.constraints.Constraint) -> None: - c.sign = long_EQUAL - assert (c.sign == EQUAL).all() +def test_constraint_sign_setter_alternative( + mc: linopy.constraints.MutableConstraint, +) -> None: + mc.sign = long_EQUAL + assert (mc.sign == EQUAL).all() -def test_constraint_sign_setter_invalid(c: linopy.constraints.Constraint) -> None: +def test_constraint_sign_setter_invalid( + mc: linopy.constraints.MutableConstraint, +) -> None: # Test that assigning lhs with other type that LinearExpression raises TypeError with pytest.raises(ValueError): - c.sign = "asd" + mc.sign = "asd" -def test_constraint_rhs_setter(c: linopy.constraints.Constraint) -> None: - sizes = c.sizes - c.rhs = 2 # type: ignore - assert (c.rhs == 2).all() - assert c.sizes == sizes +def test_constraint_rhs_setter(mc: linopy.constraints.MutableConstraint) -> None: + sizes = mc.sizes + mc.rhs = 2 # type: ignore + assert (mc.rhs == 2).all() + assert mc.sizes == sizes def test_constraint_rhs_setter_with_variable( - c: linopy.constraints.Constraint, x: linopy.Variable + mc: 
linopy.constraints.MutableConstraint, x: linopy.Variable ) -> None: - c.rhs = x # type: ignore - assert (c.rhs == 0).all() - assert (c.coeffs.isel({c.term_dim: -1}) == -1).all() - assert c.lhs.nterm == 2 + mc.rhs = x # type: ignore + assert (mc.rhs == 0).all() + assert (mc.coeffs.isel({mc.term_dim: -1}) == -1).all() + assert mc.lhs.nterm == 2 def test_constraint_rhs_setter_with_expression( - c: linopy.constraints.Constraint, x: linopy.Variable, y: linopy.Variable + mc: linopy.constraints.MutableConstraint, x: linopy.Variable, y: linopy.Variable ) -> None: - c.rhs = x + y - assert (c.rhs == 0).all() - assert (c.coeffs.isel({c.term_dim: -1}) == -1).all() - assert c.lhs.nterm == 3 + mc.rhs = x + y + assert (mc.rhs == 0).all() + assert (mc.coeffs.isel({mc.term_dim: -1}) == -1).all() + assert mc.lhs.nterm == 3 def test_constraint_rhs_setter_with_expression_and_constant( - c: linopy.constraints.Constraint, x: linopy.Variable + mc: linopy.constraints.MutableConstraint, x: linopy.Variable ) -> None: - c.rhs = x + 1 - assert (c.rhs == 1).all() - assert (c.coeffs.sum(c.term_dim) == 0).all() - assert c.lhs.nterm == 2 + mc.rhs = x + 1 + assert (mc.rhs == 1).all() + assert (mc.coeffs.sum(mc.term_dim) == 0).all() + assert mc.lhs.nterm == 2 -def test_constraint_labels_setter_invalid(c: linopy.constraints.Constraint) -> None: - # Test that assigning labels raises FrozenInstanceError +def test_constraint_labels_setter_invalid(c: linopy.constraints.CSRConstraint) -> None: + # Test that assigning labels raises AttributeError (Constraint is frozen) with pytest.raises(AttributeError): c.labels = c.labels # type: ignore -def test_constraint_sel(c: linopy.constraints.Constraint) -> None: - assert isinstance(c.sel(first=[1, 2]), Constraint) - assert isinstance(c.isel(first=[1, 2]), Constraint) +def test_constraint_sel(c: linopy.constraints.CSRConstraint) -> None: + assert isinstance(c.mutable().sel(first=[1, 2]), ConstraintBase) + assert isinstance(c.mutable().isel(first=[1, 2]), 
ConstraintBase) -def test_constraint_flat(c: linopy.constraints.Constraint) -> None: +def test_constraint_flat(c: linopy.constraints.CSRConstraint) -> None: assert isinstance(c.flat, pd.DataFrame) -def test_iterate_slices(c: linopy.constraints.Constraint) -> None: - for i in c.iterate_slices(slice_size=2): - assert isinstance(i, Constraint) - assert c.coord_dims == i.coord_dims +def test_iterate_slices(mc: linopy.constraints.MutableConstraint) -> None: + for i in mc.iterate_slices(slice_size=2): + assert isinstance(i, ConstraintBase) + assert mc.coord_dims == i.coord_dims -def test_constraint_to_polars(c: linopy.constraints.Constraint) -> None: +def test_constraint_to_polars(c: linopy.constraints.CSRConstraint) -> None: assert isinstance(c.to_polars(), pl.DataFrame) def test_constraint_to_polars_mixed_signs(m: Model, x: linopy.Variable) -> None: """Test to_polars when a constraint has mixed sign values across dims.""" - # Create a constraint, then manually patch the sign to have mixed values - m.add_constraints(x >= 0, name="mixed") - con = m.constraints["mixed"] + # Use MutableConstraint so sign data can be patched + con = m.add_constraints(x >= 0, name="mixed", freeze=False) # Replace sign data with mixed signs across the first dimension n = con.data.sizes["first"] signs = np.array(["<=" if i % 2 == 0 else ">=" for i in range(n)]) @@ -464,8 +494,12 @@ def test_constraint_assignment_sanitize_zeros( ) -> None: m.add_constraints(0 * x + y == 0, name="c2") m.constraints.sanitize_zeros() - assert m.constraints["c2"].vars[0, 0, 0].item() == -1 - assert np.isnan(m.constraints["c2"].coeffs[0, 0, 0].item()) + c2 = m.constraints["c2"] + assert c2.nterm == 1 + assert c2.has_variable(y) + assert not c2.has_variable(x) + csr, _ = c2.to_matrix(m.variables.label_index) + assert (csr.data == 1).all() def test_constraint_assignment_with_args( @@ -588,8 +622,11 @@ def test_constraint_with_helper_dims_as_coords(m: Model) -> None: def test_constraint_matrix(m: Model) -> None: - A = 
m.constraints.to_matrix() - assert A.shape == (10, 14) + # Returns (csr_array, con_labels) — dense: active rows and active-var columns + A, con_labels = m.constraints.to_matrix() + n_active_vars = len(m.variables.label_index.vlabels) + assert A.shape == (10, n_active_vars) + assert len(con_labels) == 10 def test_constraint_matrix_masked_variables() -> None: @@ -600,54 +637,48 @@ def test_constraint_matrix_masked_variables() -> None: missing. The matrix shoud not be built for constraints which have variables which are missing. """ - # now with missing variables m = Model() mask = pd.Series([False] * 5 + [True] * 5) x = m.add_variables(coords=[range(10)], mask=mask) m.add_variables() m.add_constraints(x, EQUAL, 0) - A = m.constraints.to_matrix(filter_missings=True) - assert A.shape == (5, 6) - assert A.shape == (m.ncons, m.nvars) - - A = m.constraints.to_matrix(filter_missings=False) - assert A.shape == m.shape + # Returns dense matrix: active rows only, all active-var columns + A, con_labels = m.constraints.to_matrix() + n_active_vars = len(m.variables.label_index.vlabels) + assert A.shape == (m.ncons, n_active_vars) + assert len(con_labels) == m.ncons def test_constraint_matrix_masked_constraints() -> None: """ Test constraint matrix with missing constraints. 
""" - # now with missing variables m = Model() mask = pd.Series([False] * 5 + [True] * 5) x = m.add_variables(coords=[range(10)]) m.add_variables() m.add_constraints(x, EQUAL, 0, mask=mask) - A = m.constraints.to_matrix(filter_missings=True) - assert A.shape == (5, 11) - assert A.shape == (m.ncons, m.nvars) - - A = m.constraints.to_matrix(filter_missings=False) - assert A.shape == m.shape + # active cons are indices 5-9, which reference vars 5-9 only (all active) + A, con_labels = m.constraints.to_matrix() + n_active_vars = len(m.variables.label_index.vlabels) + assert A.shape == (m.ncons, n_active_vars) + assert len(con_labels) == m.ncons def test_constraint_matrix_masked_constraints_and_variables() -> None: """ - Test constraint matrix with missing constraints. + Test constraint matrix with missing constraints and variables. """ - # now with missing variables m = Model() mask = pd.Series([False] * 5 + [True] * 5) x = m.add_variables(coords=[range(10)], mask=mask) m.add_variables() m.add_constraints(x, EQUAL, 0, mask=mask) - A = m.constraints.to_matrix(filter_missings=True) - assert A.shape == (5, 6) - assert A.shape == (m.ncons, m.nvars) - - A = m.constraints.to_matrix(filter_missings=False) - assert A.shape == m.shape + # both masks align: 5 active cons x all active vars (5 x + 1 scalar) + A, con_labels = m.constraints.to_matrix() + n_active_vars = len(m.variables.label_index.vlabels) + assert A.shape == (m.ncons, n_active_vars) + assert len(con_labels) == m.ncons def test_get_name_by_label() -> None: @@ -674,3 +705,185 @@ def test_constraints_inequalities(m: Model) -> None: def test_constraints_equalities(m: Model) -> None: assert isinstance(m.constraints.equalities, Constraints) + + +def test_freeze_mutable_roundtrip(m: Model) -> None: + frozen = m.constraints["c"] + assert isinstance(frozen, linopy.constraints.CSRConstraint) + mc = frozen.mutable() + assert isinstance(mc, Constraint) + refrozen = linopy.constraints.CSRConstraint.from_mutable(mc, 
frozen._cindex) + assert_equal(frozen.labels, refrozen.labels) + assert_equal(frozen.rhs, refrozen.rhs) + assert_equal(frozen.sign, refrozen.sign) + np.testing.assert_array_equal(frozen._csr.toarray(), refrozen._csr.toarray()) + np.testing.assert_array_equal(frozen._con_labels, refrozen._con_labels) + + +def test_freeze_mutable_roundtrip_with_masking() -> None: + m = Model() + x = m.add_variables(coords=[pd.RangeIndex(5, name="i")], name="x") + mask = xr.DataArray([True, False, True, False, True], dims=["i"]) + m.add_constraints(x.where(mask) >= 0, name="c") + frozen = m.constraints["c"] + mc = frozen.mutable() + refrozen = linopy.constraints.CSRConstraint.from_mutable(mc, frozen._cindex) + assert_equal(frozen.labels, refrozen.labels) + assert_equal(frozen.rhs, refrozen.rhs) + assert frozen.ncons == refrozen.ncons == 3 + + +def test_from_mutable_mixed_signs() -> None: + m = Model() + x = m.add_variables(coords=[pd.RangeIndex(3, name="i")], name="x") + m.add_constraints(x >= 0, name="mixed", freeze=False) + mc = m.constraints["mixed"] + assert isinstance(mc, Constraint) + mc._data["sign"] = xr.DataArray(["<=", ">=", "<="], dims=["i"]) + frozen = linopy.constraints.CSRConstraint.from_mutable(mc) + assert isinstance(frozen._sign, np.ndarray) + assert list(frozen._sign) == ["<=", ">=", "<="] + assert_equal(frozen.sign, mc.sign) + + +def test_variable_label_index(m: Model) -> None: + li = m.variables.label_index + assert li.n_active_vars > 0 + assert len(li.vlabels) == li.n_active_vars + assert li.label_to_pos.shape[0] == m._xCounter + for lbl in li.vlabels: + assert li.label_to_pos[lbl] >= 0 + assert (li.label_to_pos[li.vlabels] == np.arange(li.n_active_vars)).all() + + +def test_variable_label_index_invalidation(m: Model) -> None: + li = m.variables.label_index + old_vlabels = li.vlabels.copy() + m.add_variables(name="w") + li.invalidate() + assert len(li.vlabels) > len(old_vlabels) + + +def test_to_matrix_with_rhs(m: Model) -> None: + c = m.constraints["c"] + li = 
m.variables.label_index + csr, con_labels, b, sense = c.to_matrix_with_rhs(li) + assert csr.shape[0] == len(con_labels) + assert csr.shape[0] == len(b) + assert csr.shape[0] == len(sense) + assert all(s in ("<", ">", "=") for s in sense) + np.testing.assert_array_equal(b, c._rhs) + + +def test_to_matrix_with_rhs_mutable(m: Model) -> None: + mc = m.constraints["c"].mutable() + li = m.variables.label_index + csr, con_labels, b, sense = mc.to_matrix_with_rhs(li) + assert csr.shape[0] == len(con_labels) + assert csr.shape[0] == len(b) + assert csr.shape[0] == len(sense) + + +def test_constraint_repr_shows_variable_names(m: Model) -> None: + c = m.constraints["c"] + r = repr(c) + assert "x" in r + + +def test_freeze_mixed_signs_from_rule() -> None: + m = Model() + x = m.add_variables(coords=[pd.RangeIndex(4, name="i")], name="x") + coords = [pd.RangeIndex(4, name="i")] + + def bound(m, i): + if i % 2: + return x.at[i] >= i + return x.at[i] == 0.0 + + con = m.add_constraints(bound, coords=coords, name="mixed_rule") + assert isinstance(con, linopy.constraints.CSRConstraint) + assert isinstance(con._sign, np.ndarray) + assert con.ncons == 4 + expected_signs = ["=", ">=", "=", ">="] + assert list(con._sign) == expected_signs + np.testing.assert_array_equal(con.sign.values, expected_signs) + + +def test_frozen_rhs_setter() -> None: + m = Model() + time = pd.RangeIndex(5, name="t") + x = m.add_variables(lower=0, coords=[time], name="x") + con = m.add_constraints(x >= 1, name="c") + assert isinstance(con, linopy.constraints.CSRConstraint) + con.rhs = 10 + np.testing.assert_array_equal(con._rhs, np.full(5, 10.0)) + factor = pd.Series(range(5), index=time) + con.rhs = 2 * factor + np.testing.assert_array_equal(con._rhs, 2 * np.arange(5, dtype=float)) + + +def test_frozen_lhs_setter() -> None: + m = Model() + time = pd.RangeIndex(5, name="t") + x = m.add_variables(lower=0, coords=[time], name="x") + y = m.add_variables(lower=0, coords=[time], name="y") + con = m.add_constraints(x 
>= 0, name="c") + assert isinstance(con, linopy.constraints.CSRConstraint) + con.lhs = 3 * x + 2 * y + lhs = con.mutable().lhs + assert lhs.nterm == 2 + + +def test_frozen_setter_invalidates_dual() -> None: + m = Model() + x = m.add_variables(lower=0, coords=[pd.RangeIndex(3, name="i")], name="x") + con = m.add_constraints(x >= 0, name="c") + con._dual = np.array([1.0, 2.0, 3.0]) + con.rhs = 10 + assert con._dual is None + + +def test_mixed_sign_to_matrix_with_rhs() -> None: + m = Model() + x = m.add_variables(coords=[pd.RangeIndex(4, name="i")], name="x") + coords = [pd.RangeIndex(4, name="i")] + + def bound(m, i): + if i % 2: + return x.at[i] >= i + return x.at[i] == 0.0 + + con = m.add_constraints(bound, coords=coords, name="c") + li = m.variables.label_index + csr, con_labels, b, sense = con.to_matrix_with_rhs(li) + assert len(sense) == 4 + assert list(sense) == ["=", ">", "=", ">"] + + +def test_mixed_sign_sanitize_infinities() -> None: + m = Model() + x = m.add_variables(coords=[pd.RangeIndex(4, name="i")], name="x") + m.add_constraints(x >= 0, name="c", freeze=False) + mc = m.constraints["c"] + mc._data["sign"] = xr.DataArray(["<=", ">=", "<=", ">="], dims=["i"]) + mc._data["rhs"] = xr.DataArray([np.inf, -np.inf, 1.0, 2.0], dims=["i"]) + frozen = mc.freeze() + frozen.sanitize_infinities() + assert frozen.ncons == 2 + np.testing.assert_array_equal(frozen._rhs, [1.0, 2.0]) + + +def test_mixed_sign_repr() -> None: + m = Model() + x = m.add_variables(coords=[pd.RangeIndex(4, name="i")], name="x") + coords = [pd.RangeIndex(4, name="i")] + + def bound(m, i): + if i % 2: + return x.at[i] >= i + return x.at[i] == 0.0 + + con = m.add_constraints(bound, coords=coords, name="c") + r = repr(con) + assert "≥" in r + assert "=" in r diff --git a/test/test_constraints.py b/test/test_constraints.py index 9a467c8c..1667bfec 100644 --- a/test/test_constraints.py +++ b/test/test_constraints.py @@ -36,10 +36,10 @@ def test_constraint_assignment() -> None: assert "con0" in 
getattr(m.constraints, attr) assert m.constraints.labels.con0.shape == (10, 10) - assert m.constraints.labels.con0.dtype == int - assert m.constraints.coeffs.con0.dtype in (int, float) - assert m.constraints.vars.con0.dtype in (int, float) - assert m.constraints.rhs.con0.dtype in (int, float) + assert np.issubdtype(m.constraints.labels.con0.dtype, np.integer) + assert np.issubdtype(m.constraints.coeffs.con0.dtype, np.number) + assert np.issubdtype(m.constraints.vars.con0.dtype, np.number) + assert np.issubdtype(m.constraints.rhs.con0.dtype, np.number) assert_conequal(m.constraints.con0, con0) @@ -90,10 +90,10 @@ def test_anonymous_constraint_assignment() -> None: assert "con0" in getattr(m.constraints, attr) assert m.constraints.labels.con0.shape == (10, 10) - assert m.constraints.labels.con0.dtype == int - assert m.constraints.coeffs.con0.dtype in (int, float) - assert m.constraints.vars.con0.dtype in (int, float) - assert m.constraints.rhs.con0.dtype in (int, float) + assert np.issubdtype(m.constraints.labels.con0.dtype, np.integer) + assert np.issubdtype(m.constraints.coeffs.con0.dtype, np.number) + assert np.issubdtype(m.constraints.vars.con0.dtype, np.number) + assert np.issubdtype(m.constraints.rhs.con0.dtype, np.number) def test_constraint_assignment_with_tuples() -> None: diff --git a/test/test_io.py b/test/test_io.py index e8ded144..09b19fdc 100644 --- a/test/test_io.py +++ b/test/test_io.py @@ -78,6 +78,48 @@ def test_model_to_netcdf(model: Model, tmp_path: Path) -> None: assert_model_equal(m, p) +def test_model_to_netcdf_frozen_constraint(tmp_path: Path) -> None: + from linopy.constraints import CSRConstraint + + m = Model() + x = m.add_variables(coords=[pd.RangeIndex(5, name="i")], name="x") + m.add_constraints(x >= 1, name="c", freeze=True) + + assert isinstance(m.constraints["c"], CSRConstraint) + + fn = tmp_path / "test_frozen.nc" + m.to_netcdf(fn) + p = read_netcdf(fn) + + assert isinstance(p.constraints["c"], CSRConstraint) + assert_model_equal(m, 
p) + + +def test_model_to_netcdf_mixed_sign_constraint(tmp_path: Path) -> None: + from linopy.constraints import CSRConstraint + + m = Model() + x = m.add_variables(coords=[pd.RangeIndex(4, name="i")], name="x") + + def bound(m, i): + if i % 2: + return x.at[i] >= i + return x.at[i] == 0.0 + + m.add_constraints(bound, coords=[pd.RangeIndex(4, name="i")], name="c") + assert isinstance(m.constraints["c"], CSRConstraint) + + fn = tmp_path / "test_mixed_sign.nc" + m.to_netcdf(fn) + p = read_netcdf(fn) + + assert isinstance(p.constraints["c"], CSRConstraint) + import numpy as np + + np.testing.assert_array_equal(m.constraints["c"]._sign, p.constraints["c"]._sign) + assert_model_equal(m, p) + + def test_model_to_netcdf_with_sense(model: Model, tmp_path: Path) -> None: m = model m.objective.sense = "max" diff --git a/test/test_linear_expression.py b/test/test_linear_expression.py index d3b8d426..27ad5c34 100644 --- a/test/test_linear_expression.py +++ b/test/test_linear_expression.py @@ -14,6 +14,7 @@ import polars as pl import pytest import xarray as xr +from xarray.core.types import JoinOptions from xarray.testing import assert_equal from linopy import LinearExpression, Model, QuadraticExpression, Variable, merge @@ -1920,7 +1921,8 @@ def test_add_constant_join_override(self, a: Variable, c: Variable) -> None: def test_add_same_coords_all_joins(self, a: Variable, c: Variable) -> None: expr_a = 1 * a + 5 const = xr.DataArray([1, 2, 3], dims=["i"], coords={"i": [0, 1, 2]}) - for join in ["override", "outer", "inner"]: + joins: list[JoinOptions] = ["override", "outer", "inner"] + for join in joins: result = expr_a.add(const, join=join) assert list(result.coords["i"].values) == [0, 1, 2] np.testing.assert_array_equal(result.const.values, [6, 7, 8]) diff --git a/test/test_model.py b/test/test_model.py index c0988c26..a41ab92b 100644 --- a/test/test_model.py +++ b/test/test_model.py @@ -110,10 +110,11 @@ def test_remove_variable() -> None: assert "x" in m.variables - 
m.remove_variables("x") + with pytest.warns(UserWarning, match="con0"): + m.remove_variables("x") assert "x" not in m.variables - assert not m.constraints.con0.vars.isin(x.labels).any() + assert "con0" not in m.constraints assert not m.objective.vars.isin(x.labels).any() diff --git a/test/test_optimization.py b/test/test_optimization.py index cdac8e61..870cc154 100644 --- a/test/test_optimization.py +++ b/test/test_optimization.py @@ -298,7 +298,7 @@ def modified_model() -> Model: x = m.add_variables(coords=[lower.index], name="x", binary=True) y = m.add_variables(lower, name="y") - c = m.add_constraints(x + y, GREATER_EQUAL, 10) + c = m.add_constraints(x + y, GREATER_EQUAL, 10, freeze=False) y.lower = 9 c.lhs = 2 * x + y diff --git a/test/test_scalar_constraint.py b/test/test_scalar_constraint.py index cf5b3724..4872fada 100644 --- a/test/test_scalar_constraint.py +++ b/test/test_scalar_constraint.py @@ -6,7 +6,7 @@ import linopy from linopy import GREATER_EQUAL, Model, Variable -from linopy.constraints import AnonymousScalarConstraint, Constraint +from linopy.constraints import AnonymousScalarConstraint, ConstraintBase @pytest.fixture @@ -32,20 +32,20 @@ def test_anonymous_scalar_constraint_type(x: Variable) -> None: def test_simple_constraint_type(m: Model, x: Variable) -> None: - c: Constraint = m.add_constraints(x.at[0] >= 0) - assert isinstance(c, linopy.constraints.Constraint) + c: ConstraintBase = m.add_constraints(x.at[0] >= 0) + assert isinstance(c, linopy.constraints.ConstraintBase) def test_compound_constraint_type(m: Model, x: Variable) -> None: - c: Constraint = m.add_constraints(x.at[0] + x.at[1] >= 0) - assert isinstance(c, linopy.constraints.Constraint) + c: ConstraintBase = m.add_constraints(x.at[0] + x.at[1] >= 0) + assert isinstance(c, linopy.constraints.ConstraintBase) def test_explicit_simple_constraint_type(m: Model, x: Variable) -> None: - c: Constraint = m.add_constraints(x.at[0], GREATER_EQUAL, 0) - assert isinstance(c, 
linopy.constraints.Constraint) + c: ConstraintBase = m.add_constraints(x.at[0], GREATER_EQUAL, 0) + assert isinstance(c, linopy.constraints.ConstraintBase) def test_explicit_compound_constraint_type(m: Model, x: Variable) -> None: - c: Constraint = m.add_constraints(x.at[0] + x.at[1], GREATER_EQUAL, 0) - assert isinstance(c, linopy.constraints.Constraint) + c: ConstraintBase = m.add_constraints(x.at[0] + x.at[1], GREATER_EQUAL, 0) + assert isinstance(c, linopy.constraints.ConstraintBase)