Skip to content

Commit

Permalink
feat: mark optional hyperparameters as keyword only (#296)
Browse files Browse the repository at this point in the history
Closes #278.

### Summary of Changes

* Mark optional hyperparameters as keyword-only
* Add guideline to consider marking optional parameters as keyword-only
  • Loading branch information
lars-reimann authored May 6, 2023
1 parent a91172c commit 44a41eb
Show file tree
Hide file tree
Showing 12 changed files with 35 additions and 13 deletions.
20 changes: 20 additions & 0 deletions docs/development/guidelines.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,26 @@ Write full words rather than abbreviations. The increased verbosity is offset by
figure.scs(CS.AUT)
```

### Consider marking optional parameters as keyword-only

_Keyword-only parameters_ are parameters that can only be passed by name. This prevents users from accidentally passing a value to the wrong parameter, which can happen easily if several parameters have the same type. Moreover, marking a parameter as keyword-only allows us to change the order of parameters without breaking client code. Because of this, strongly consider marking optional parameters as keyword-only. In particular, optional hyperparameters of models should be keyword-only.

!!! success "**DO** (library code):"

```py
class RandomForest:
def __init__(self, *, number_of_trees: int = 100) -> None:
...
```

!!! failure "**DON'T** (library code):"

```py
class RandomForest:
def __init__(self, number_of_trees: int = 100) -> None:
...
```

### Specify types of parameters and results

Use [type hints](https://docs.python.org/3/library/typing.html) to describe the types of parameters and results of functions. This enables static type checking of client code.
Expand Down
1 change: 1 addition & 0 deletions src/safeds/ml/classical/classification/_ada_boost.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ class AdaBoost(Classifier):

def __init__(
self,
*,
learner: Classifier | None = None,
maximum_number_of_learners: int = 50,
learning_rate: float = 1.0,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ class GradientBoosting(Classifier):
If `number_of_trees` is less than or equal to 0 or `learning_rate` is non-positive.
"""

def __init__(self, number_of_trees: int = 100, learning_rate: float = 0.1) -> None:
def __init__(self, *, number_of_trees: int = 100, learning_rate: float = 0.1) -> None:
# Validation
if number_of_trees <= 0:
raise ValueError("The parameter 'number_of_trees' has to be greater than 0.")
Expand Down
4 changes: 2 additions & 2 deletions src/safeds/ml/classical/classification/_random_forest.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ class RandomForest(Classifier):
If `number_of_trees` is less than or equal to 0.
"""

def __init__(self, number_of_trees: int = 100) -> None:
def __init__(self, *, number_of_trees: int = 100) -> None:
# Validation
if number_of_trees < 1:
raise ValueError("The parameter 'number_of_trees' has to be greater than 0.")
Expand Down Expand Up @@ -65,7 +65,7 @@ def fit(self, training_set: TaggedTable) -> RandomForest:
wrapped_classifier = self._get_sklearn_classifier()
fit(wrapped_classifier, training_set)

result = RandomForest(self._number_of_trees)
result = RandomForest(number_of_trees=self._number_of_trees)
result._wrapped_classifier = wrapped_classifier
result._feature_names = training_set.features.column_names
result._target_name = training_set.target.name
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ class SupportVectorMachine(Classifier):
If `c` is less than or equal to 0.
"""

def __init__(self, c: float = 1.0) -> None:
def __init__(self, *, c: float = 1.0) -> None:
# Internal state
self._wrapped_classifier: sk_SVC | None = None
self._feature_names: list[str] | None = None
Expand Down Expand Up @@ -63,7 +63,7 @@ def fit(self, training_set: TaggedTable) -> SupportVectorMachine:
wrapped_classifier = self._get_sklearn_classifier()
fit(wrapped_classifier, training_set)

result = SupportVectorMachine(self._c)
result = SupportVectorMachine(c=self._c)
result._wrapped_classifier = wrapped_classifier
result._feature_names = training_set.features.column_names
result._target_name = training_set.target.name
Expand Down
1 change: 1 addition & 0 deletions src/safeds/ml/classical/regression/_ada_boost.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ class AdaBoost(Regressor):

def __init__(
self,
*,
learner: Regressor | None = None,
maximum_number_of_learners: int = 50,
learning_rate: float = 1.0,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ class ElasticNetRegression(Regressor):
If `alpha` is negative or `lasso_ratio` is not between 0 and 1.
"""

def __init__(self, alpha: float = 1.0, lasso_ratio: float = 0.5) -> None:
def __init__(self, *, alpha: float = 1.0, lasso_ratio: float = 0.5) -> None:
# Validation
if alpha < 0:
raise ValueError("The parameter 'alpha' must be non-negative")
Expand Down
2 changes: 1 addition & 1 deletion src/safeds/ml/classical/regression/_gradient_boosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ class GradientBoosting(Regressor):
If `number_of_trees` is less than or equal to 0 or `learning_rate` is non-positive.
"""

def __init__(self, number_of_trees: int = 100, learning_rate: float = 0.1) -> None:
def __init__(self, *, number_of_trees: int = 100, learning_rate: float = 0.1) -> None:
# Validation
if number_of_trees <= 0:
raise ValueError("The parameter 'number_of_trees' has to be greater than 0.")
Expand Down
2 changes: 1 addition & 1 deletion src/safeds/ml/classical/regression/_lasso_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ class LassoRegression(Regressor):
If `alpha` is negative.
"""

def __init__(self, alpha: float = 1.0) -> None:
def __init__(self, *, alpha: float = 1.0) -> None:
# Validation
if alpha < 0:
raise ValueError("The parameter 'alpha' must be non-negative")
Expand Down
4 changes: 2 additions & 2 deletions src/safeds/ml/classical/regression/_random_forest.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ class RandomForest(Regressor):
If `number_of_trees` is less than or equal to 0.
"""

def __init__(self, number_of_trees: int = 100) -> None:
def __init__(self, *, number_of_trees: int = 100) -> None:
# Validation
if number_of_trees < 1:
raise ValueError("The parameter 'number_of_trees' has to be greater than 0.")
Expand Down Expand Up @@ -65,7 +65,7 @@ def fit(self, training_set: TaggedTable) -> RandomForest:
wrapped_regressor = self._get_sklearn_regressor()
fit(wrapped_regressor, training_set)

result = RandomForest(self._number_of_trees)
result = RandomForest(number_of_trees=self._number_of_trees)
result._wrapped_regressor = wrapped_regressor
result._feature_names = training_set.features.column_names
result._target_name = training_set.target.name
Expand Down
2 changes: 1 addition & 1 deletion src/safeds/ml/classical/regression/_ridge_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class RidgeRegression(Regressor):
If `alpha` is negative.
"""

def __init__(self, alpha: float = 1.0) -> None:
def __init__(self, *, alpha: float = 1.0) -> None:
# Validation
if alpha < 0:
raise ValueError("The parameter 'alpha' must be non-negative")
Expand Down
4 changes: 2 additions & 2 deletions src/safeds/ml/classical/regression/_support_vector_machine.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ class SupportVectorMachine(Regressor):
If `c` is less than or equal to 0.
"""

def __init__(self, c: float = 1.0) -> None:
def __init__(self, *, c: float = 1.0) -> None:
# Internal state
self._wrapped_regressor: sk_SVR | None = None
self._feature_names: list[str] | None = None
Expand Down Expand Up @@ -63,7 +63,7 @@ def fit(self, training_set: TaggedTable) -> SupportVectorMachine:
wrapped_regressor = self._get_sklearn_regressor()
fit(wrapped_regressor, training_set)

result = SupportVectorMachine(self._c)
result = SupportVectorMachine(c=self._c)
result._wrapped_regressor = wrapped_regressor
result._feature_names = training_set.features.column_names
result._target_name = training_set.target.name
Expand Down

0 comments on commit 44a41eb

Please sign in to comment.