
Commit

Use own errors for Variable
glatterf42 committed Sep 30, 2024
1 parent 3e8d9ac commit 5468b41
Showing 4 changed files with 71 additions and 21 deletions.
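This commit replaces the generic ValueError and AssertionError raises in the optimization Variable model, repository, and tests with the dedicated exception types OptimizationItemUsageError and OptimizationDataValidationError from ixmp4.core.exceptions. That module is not touched by this diff, so the sketch below only illustrates the shape such classes could take; the class names come from the imports in this commit, while the base class and docstrings are assumptions.

# Hypothetical sketch, not the actual contents of ixmp4/core/exceptions.py.
# Only the two Optimization* class names are confirmed by this commit.
class IxmpError(Exception):
    """Assumed common base class for ixmp4 errors."""


class OptimizationItemUsageError(IxmpError):
    """Raised when an optimization item (e.g. a Variable) is used incorrectly."""


class OptimizationDataValidationError(IxmpError):
    """Raised when data attached to an optimization item fails validation."""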
ixmp4/data/db/optimization/variable/model.py
4 changes: 3 additions & 1 deletion
@@ -4,6 +4,7 @@
from sqlalchemy.orm import validates

from ixmp4 import db
from ixmp4.core.exceptions import OptimizationDataValidationError
from ixmp4.data import types
from ixmp4.data.abstract import optimization as abstract

@@ -15,6 +16,7 @@ class OptimizationVariable(base.BaseModel):
# NOTE: These might be mixin-able, but would require some abstraction
NotFound: ClassVar = abstract.Variable.NotFound
NotUnique: ClassVar = abstract.Variable.NotUnique
DataInvalid: ClassVar = OptimizationDataValidationError
DeletionPrevented: ClassVar = abstract.Variable.DeletionPrevented

# constrained_to_indexsets: ClassVar[list[str] | None] = None
@@ -31,7 +33,7 @@ def validate_data(self, key, data: dict[str, Any]):
del data_to_validate["levels"]
del data_to_validate["marginals"]
_ = utils.validate_data(
key=key,
host=self,
data=data_to_validate,
columns=self.columns,
)
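Two things change in this model: validate_data() now passes the model instance itself ("host") to utils.validate_data() instead of the validation key, and the model exposes its validation error as a DataInvalid ClassVar next to NotFound, NotUnique, and DeletionPrevented. Callers can therefore raise the error through the model class instead of importing it, which is exactly what the repository below does when a dict cannot be turned into a DataFrame. A minimal sketch of that pattern; the module path is inferred from the file path above and the sample data is made up.

import pandas as pd

from ixmp4.data.db.optimization.variable.model import OptimizationVariable

raw = {"levels": [1, 2, 3], "marginals": [0.0, 0.1]}  # unequal lengths
try:
    df = pd.DataFrame.from_dict(data=raw)
except ValueError as e:
    # Surface pandas' message through the model's own error type.
    raise OptimizationVariable.DataInvalid(str(e)) from e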
ixmp4/data/db/optimization/variable/repository.py
27 changes: 20 additions & 7 deletions
@@ -3,6 +3,7 @@
import pandas as pd

from ixmp4 import db
from ixmp4.core.exceptions import OptimizationItemUsageError
from ixmp4.data.abstract import optimization as abstract
from ixmp4.data.auth.decorators import guard

@@ -19,6 +20,8 @@ class VariableRepository(
):
model_class = Variable

UsageError = OptimizationItemUsageError

def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.docs = OptimizationVariableDocsRepository(*args, **kwargs)
@@ -114,14 +117,16 @@ def create(
# TODO If this is removed, need to check above that constrained_to_indexsets
# is not None
if constrained_to_indexsets is None:
raise ValueError(
raise self.UsageError(
f"While processing Variable {name}: \n"
"Received `column_names` to name columns, but no "
"`constrained_to_indexsets` to indicate which IndexSets to use for "
"these columns. Please provide `constrained_to_indexsets` or "
"remove `column_names`!"
)
elif len(column_names) != len(constrained_to_indexsets):
raise ValueError(
raise self.UsageError(
f"While processing Variable {name}: \n"
"`constrained_to_indexsets` and `column_names` not equal in "
"length! Please provide the same number of entries for both!"
)
@@ -130,7 +135,10 @@
# if len(constrained_to_indexsets) != len(set(constrained_to_indexsets)):
# raise ValueError("Each dimension must be constrained to a unique indexset!") # noqa
elif len(column_names) != len(set(column_names)):
raise ValueError("The given `column_names` are not unique!")
raise self.UsageError(
f"While processing Variable {name}: \n"
"The given `column_names` are not unique!"
)

variable = super().create(
run_id=run_id,
@@ -159,13 +167,18 @@ def tabulate(self, *args, **kwargs) -> pd.DataFrame:
@guard("edit")
def add_data(self, variable_id: int, data: dict[str, Any] | pd.DataFrame) -> None:
if isinstance(data, dict):
data = pd.DataFrame.from_dict(data=data)
try:
data = pd.DataFrame.from_dict(data=data)
except ValueError as e:
raise Variable.DataInvalid(str(e)) from e
variable = self.get_by_id(id=variable_id)

missing_columns = set(["levels", "marginals"]) - set(data.columns)
assert (
not missing_columns
), f"Variable.data must include the column(s): {', '.join(missing_columns)}!"
if missing_columns:
raise OptimizationItemUsageError(
"Variable.data must include the column(s): "
f"{', '.join(missing_columns)}!"
)

index_list = [column.name for column in variable.columns]
existing_data = pd.DataFrame(variable.data)
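In the repository, create() now raises self.UsageError (an alias for OptimizationItemUsageError) and prefixes the message with the Variable name, and add_data() swaps the assert for an explicit raise. The latter matters because assert statements are skipped entirely when Python runs with -O, so the column check would silently disappear in optimized mode. Calling code that previously caught ValueError or AssertionError should catch the dedicated types instead; a small sketch, where the repository object and IDs are placeholders and only add_data()'s signature is taken from the diff.

from typing import Any

import pandas as pd

from ixmp4.core.exceptions import (
    OptimizationDataValidationError,
    OptimizationItemUsageError,
)


def try_add_data(
    variables, variable_id: int, data: dict[str, Any] | pd.DataFrame
) -> bool:
    """Return False instead of propagating errors from add_data(). Sketch only."""
    try:
        variables.add_data(variable_id=variable_id, data=data)
    except (OptimizationItemUsageError, OptimizationDataValidationError) as exc:
        print(f"Rejected data for variable {variable_id}: {exc}")
        return False
    return True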
tests/core/test_optimization_variable.py
35 changes: 29 additions & 6 deletions
@@ -3,6 +3,10 @@

import ixmp4
from ixmp4.core import IndexSet, OptimizationVariable
from ixmp4.core.exceptions import (
OptimizationDataValidationError,
OptimizationItemUsageError,
)

from ..utils import assert_unordered_equality, create_indexsets_for_run

@@ -70,8 +74,19 @@ def test_create_variable(self, platform: ixmp4.Platform):
"Variable", constrained_to_indexsets=[indexset.name]
)

# Test that giving column_names, but not constrained_to_indexsets raises
with pytest.raises(
OptimizationItemUsageError,
match="Received `column_names` to name columns, but no "
"`constrained_to_indexsets`",
):
_ = run.optimization.variables.create(
"Variable 0",
column_names=["Dimension 1"],
)

# Test mismatch in constrained_to_indexsets and column_names raises
with pytest.raises(ValueError, match="not equal in length"):
with pytest.raises(OptimizationItemUsageError, match="not equal in length"):
_ = run.optimization.variables.create(
"Variable 0",
constrained_to_indexsets=[indexset.name],
@@ -88,7 +103,9 @@ def test_create_variable(self, platform: ixmp4.Platform):
assert variable_3.columns[0].name == "Column 1"

# Test duplicate column_names raise
with pytest.raises(ValueError, match="`column_names` are not unique"):
with pytest.raises(
OptimizationItemUsageError, match="`column_names` are not unique"
):
_ = run.optimization.variables.create(
name="Variable 0",
constrained_to_indexsets=[indexset.name, indexset.name],
@@ -162,7 +179,8 @@ def test_variable_add_data(self, platform: ixmp4.Platform):
)

with pytest.raises(
AssertionError, match=r"must include the column\(s\): marginals!"
OptimizationItemUsageError,
match=r"must include the column\(s\): marginals!",
):
variable_2.add(
pd.DataFrame(
@@ -175,7 +193,7 @@
)

with pytest.raises(
AssertionError, match=r"must include the column\(s\): levels!"
OptimizationItemUsageError, match=r"must include the column\(s\): levels!"
):
variable_2.add(
data=pd.DataFrame(
@@ -189,7 +207,10 @@

# By converting data to pd.DataFrame, we automatically enforce equal length
# of new columns, raises All arrays must be of the same length otherwise:
with pytest.raises(ValueError, match="All arrays must be of the same length"):
with pytest.raises(
OptimizationDataValidationError,
match="All arrays must be of the same length",
):
variable_2.add(
data={
indexset.name: ["foo", "foo"],
@@ -199,7 +220,9 @@
},
)

with pytest.raises(ValueError, match="contains duplicate rows"):
with pytest.raises(
OptimizationDataValidationError, match="contains duplicate rows"
):
variable_2.add(
data={
indexset.name: ["foo", "foo"],
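The core-layer tests now expect the dedicated error types instead of ValueError and AssertionError, and gain a case for passing column_names without constrained_to_indexsets. Note that pytest.raises(match=...) treats the pattern as a regular expression searched against str(exc), which is why the parentheses in "column(s)" are escaped above. A minimal standalone illustration; the message text mirrors the one in the diff.

import pytest

from ixmp4.core.exceptions import OptimizationItemUsageError


def test_match_is_a_regex() -> None:
    with pytest.raises(
        OptimizationItemUsageError, match=r"must include the column\(s\): marginals!"
    ):
        raise OptimizationItemUsageError(
            "Variable.data must include the column(s): marginals!"
        )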
tests/data/test_optimization_variable.py
26 changes: 19 additions & 7 deletions
@@ -2,6 +2,10 @@
import pytest

import ixmp4
from ixmp4.core.exceptions import (
OptimizationDataValidationError,
OptimizationItemUsageError,
)
from ixmp4.data.abstract import OptimizationVariable

from ..utils import assert_unordered_equality, create_indexsets_for_run
@@ -69,7 +73,7 @@ def test_create_variable(self, platform: ixmp4.Platform):

# Test that giving column_names, but not constrained_to_indexsets raises
with pytest.raises(
ValueError,
OptimizationItemUsageError,
match="Received `column_names` to name columns, but no "
"`constrained_to_indexsets`",
):
@@ -80,7 +84,7 @@
)

# Test mismatch in constrained_to_indexsets and column_names raises
with pytest.raises(ValueError, match="not equal in length"):
with pytest.raises(OptimizationItemUsageError, match="not equal in length"):
_ = platform.backend.optimization.variables.create(
run_id=run.id,
name="Variable 0",
@@ -99,7 +103,9 @@
assert variable_3.columns[0].name == "Column 1"

# Test duplicate column_names raise
with pytest.raises(ValueError, match="`column_names` are not unique"):
with pytest.raises(
OptimizationItemUsageError, match="`column_names` are not unique"
):
_ = platform.backend.optimization.variables.create(
run_id=run.id,
name="Variable 0",
@@ -184,7 +190,7 @@ def test_variable_add_data(self, platform: ixmp4.Platform):
)

with pytest.raises(
AssertionError, match=r"must include the column\(s\): levels!"
OptimizationItemUsageError, match=r"must include the column\(s\): levels!"
):
platform.backend.optimization.variables.add_data(
variable_id=variable_2.id,
@@ -198,7 +204,8 @@
)

with pytest.raises(
AssertionError, match=r"must include the column\(s\): marginals!"
OptimizationItemUsageError,
match=r"must include the column\(s\): marginals!",
):
platform.backend.optimization.variables.add_data(
variable_id=variable_2.id,
@@ -213,7 +220,10 @@

# By converting data to pd.DataFrame, we automatically enforce equal length
# of new columns, raises All arrays must be of the same length otherwise:
with pytest.raises(ValueError, match="All arrays must be of the same length"):
with pytest.raises(
OptimizationDataValidationError,
match="All arrays must be of the same length",
):
platform.backend.optimization.variables.add_data(
variable_id=variable_2.id,
data={
@@ -224,7 +234,9 @@
},
)

with pytest.raises(ValueError, match="contains duplicate rows"):
with pytest.raises(
OptimizationDataValidationError, match="contains duplicate rows"
):
platform.backend.optimization.variables.add_data(
variable_id=variable_2.id,
data={
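The data-layer tests mirror the core-layer ones. The "contains duplicate rows" message is produced by the shared validation utility rather than by code in this diff; the check amounts to something like the sketch below, where only the quoted phrase is confirmed by the tests and the exact wording and columns considered are assumptions.

import pandas as pd

from ixmp4.core.exceptions import OptimizationDataValidationError

df = pd.DataFrame(
    {"Indexset": ["foo", "foo"], "levels": [1, 1], "marginals": [0, 0]}
)
if df.duplicated().any():
    # Actual ixmp4 wording may differ; the tests only match "contains duplicate rows".
    raise OptimizationDataValidationError("The data contains duplicate rows!")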
