-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: regularization strength for logistic classifier (#866)
Closes #750 ### Summary of Changes Added an optional, keyword-only constructor parameter c: float = 1.0 and passed it to the wrapped scikit-learn estimator. <!-- Please provide a summary of changes in this pull request, ensuring all changes are explained. --> --------- Co-authored-by: grefrathc <[email protected]> Co-authored-by: megalinter-bot <[email protected]> Co-authored-by: Lars Reimann <[email protected]>
- Loading branch information
1 parent
4ef078e
commit 9f74e92
Showing
2 changed files
with
60 additions
and
3 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
33 changes: 33 additions & 0 deletions
33
tests/safeds/ml/classical/classification/test_logistic_classifier.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,33 @@ | ||
import pytest | ||
from safeds.data.labeled.containers import TabularDataset | ||
from safeds.data.tabular.containers import Table | ||
from safeds.exceptions import OutOfBoundsError | ||
from safeds.ml.classical.classification import LogisticClassifier | ||
|
||
|
||
@pytest.fixture()
def training_set() -> TabularDataset:
    """Provide a tiny two-column table with ``col1`` designated as the target."""
    data = Table({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
    return data.to_tabular_dataset(target_name="col1")
|
||
|
||
class TestC:
    """Tests for the ``c`` (inverse regularization strength) constructor parameter."""

    def test_should_be_passed_to_fitted_model(self, training_set: TabularDataset) -> None:
        # The value given at construction must survive fitting.
        classifier = LogisticClassifier(c=2).fit(training_set)
        assert classifier.c == 2

    def test_should_be_passed_to_sklearn(self, training_set: TabularDataset) -> None:
        # scikit-learn exposes the same setting as the uppercase ``C`` attribute.
        classifier = LogisticClassifier(c=2).fit(training_set)
        assert classifier._wrapped_model is not None
        assert classifier._wrapped_model.C == 2

    def test_clone(self, training_set: TabularDataset) -> None:
        # Cloning a fitted model must carry the hyperparameter over.
        original = LogisticClassifier(c=2).fit(training_set)
        clone = original._clone()
        assert isinstance(clone, LogisticClassifier)
        assert clone.c == original.c

    @pytest.mark.parametrize("c", [-1.0, 0.0], ids=["minus_one", "zero"])
    def test_should_raise_if_less_than_or_equal_to_0(self, c: float) -> None:
        # Values outside the open interval (0, inf) are rejected at construction time.
        with pytest.raises(OutOfBoundsError):
            LogisticClassifier(c=c)