feat: set learning rate of Gradient Boosting models (#253)
Closes #168.

### Summary of Changes

Add `learning_rate` parameter to `GradientBoosting` classifier and regressor.

Co-authored-by: Lars Reimann <[email protected]>
1 parent 8eea3dd · commit 9ffaf55
Showing 4 changed files with 72 additions and 8 deletions.
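Only the two new test files are reproduced below; the classifier and regressor implementation files changed by this commit are not shown in this excerpt. The following is a minimal sketch of what the parameter addition likely looks like, inferred from the error message and the private attributes the new tests exercise; the class skeleton, the default of 0.1, and the helper name `_create_sklearn_classifier` are assumptions, not taken from the commit.

```python
# Hypothetical sketch only; the real implementation files are not shown in this
# excerpt. The default of 0.1, the class skeleton, and the helper name
# `_create_sklearn_classifier` are assumptions.
from __future__ import annotations

from sklearn.ensemble import GradientBoostingClassifier as sk_GradientBoostingClassifier


class GradientBoosting:
    def __init__(self, learning_rate: float = 0.1) -> None:
        # Reject zero and negative values, matching the error message the new tests expect.
        if learning_rate <= 0:
            raise ValueError("learning_rate must be positive.")
        self._learning_rate = learning_rate
        self._wrapped_classifier: sk_GradientBoostingClassifier | None = None

    def _create_sklearn_classifier(self) -> sk_GradientBoostingClassifier:
        # Forward the stored hyperparameter to scikit-learn, which is what the
        # second test in each file asserts via `.learning_rate`.
        return sk_GradientBoostingClassifier(learning_rate=self._learning_rate)
```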
tests/safeds/ml/classical/classification/test_gradient_boosting_classification.py (17 additions, 0 deletions)
@@ -0,0 +1,17 @@
import pytest
from safeds.data.tabular.containers import Table
from safeds.ml.classical.classification import GradientBoosting


def test_should_throw_value_error_if_learning_rate_is_non_positive() -> None:
    with pytest.raises(ValueError, match="learning_rate must be positive."):
        GradientBoosting(learning_rate=-1)


def test_should_pass_learning_rate_to_sklearn() -> None:
    training_set = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
    tagged_table = training_set.tag_columns("col1")

    regressor = GradientBoosting(learning_rate=2).fit(tagged_table)
    assert regressor._wrapped_classifier is not None
    assert regressor._wrapped_classifier.learning_rate == regressor._learning_rate
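For reference, a minimal usage sketch of the new parameter, mirroring the calls in the test above; the value 0.05 is an arbitrary example, not taken from the commit.

```python
from safeds.data.tabular.containers import Table
from safeds.ml.classical.classification import GradientBoosting

# Mirrors the test data above; 0.05 is an arbitrary example learning rate.
training_set = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
tagged_table = training_set.tag_columns("col1")

fitted_classifier = GradientBoosting(learning_rate=0.05).fit(tagged_table)
```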
tests/safeds/ml/classical/regression/test_gradient_boosting_regression.py (17 additions, 0 deletions)
@@ -0,0 +1,17 @@
import pytest
from safeds.data.tabular.containers import Table
from safeds.ml.classical.regression import GradientBoosting


def test_should_throw_value_error_if_learning_rate_is_non_positive() -> None:
    with pytest.raises(ValueError, match="learning_rate must be positive."):
        GradientBoosting(learning_rate=-1)


def test_should_pass_learning_rate_to_sklearn() -> None:
    training_set = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
    tagged_table = training_set.tag_columns("col1")

    regressor = GradientBoosting(learning_rate=2).fit(tagged_table)
    assert regressor._wrapped_regressor is not None
    assert regressor._wrapped_regressor.learning_rate == regressor._learning_rate
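Both test files check only `learning_rate=-1`, although the test names and the error message cover all non-positive values. A parametrized variant that also exercises the zero boundary could look like the sketch below; it is illustrative only and not part of the commit.

```python
import pytest
from safeds.ml.classical.regression import GradientBoosting


# Illustrative sketch, not part of the commit: also covers the zero boundary.
@pytest.mark.parametrize("learning_rate", [-1, 0])
def test_learning_rate_must_be_positive(learning_rate: float) -> None:
    with pytest.raises(ValueError, match="learning_rate must be positive."):
        GradientBoosting(learning_rate=learning_rate)
```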