Skip to content

Commit

Permalink
[python] Migrate to f-strings in python-package/lightgbm/sklearn.py (#4188)

Browse files Browse the repository at this point in the history

* Migrate to f-strings in python-package/lightgbm/sklearn.py

* Apply suggestions from code review

Co-authored-by: James Lamb <[email protected]>

* Update python-package/lightgbm/sklearn.py

Co-authored-by: James Lamb <[email protected]>

* Apply suggestions from code review

Co-authored-by: Nikita Titov <[email protected]>

* Add suggestions from code review

* resolve conflicts

* Apply suggestions from code review

Co-authored-by: James Lamb <[email protected]>

* Update sklearn.py

Co-authored-by: James Lamb <[email protected]>
Co-authored-by: Nikita Titov <[email protected]>
  • Loading branch information
3 people authored Apr 19, 2021
1 parent c109a59 commit 8e126c8
Showing 1 changed file with 11 additions and 12 deletions.
23 changes: 11 additions & 12 deletions python-package/lightgbm/sklearn.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def __call__(self, preds, dataset):
elif argc == 3:
grad, hess = self.func(labels, preds, dataset.get_group())
else:
raise TypeError("Self-defined objective function should have 2 or 3 arguments, got %d" % argc)
raise TypeError(f"Self-defined objective function should have 2 or 3 arguments, got {argc}")
"""weighted for objective"""
weight = dataset.get_weight()
if weight is not None:
Expand Down Expand Up @@ -171,7 +171,7 @@ def __call__(self, preds, dataset):
elif argc == 4:
return self.func(labels, preds, dataset.get_weight(), dataset.get_group())
else:
raise TypeError("Self-defined eval function should have 2, 3 or 4 arguments, got %d" % argc)
raise TypeError(f"Self-defined eval function should have 2, 3 or 4 arguments, got {argc}")


# documentation templates for LGBMModel methods are shared between the classes in
Expand Down Expand Up @@ -532,8 +532,8 @@ def set_params(self, **params):
"""
for key, value in params.items():
setattr(self, key, value)
if hasattr(self, '_' + key):
setattr(self, '_' + key, value)
if hasattr(self, f"_{key}"):
setattr(self, f"_{key}", value)
self._other_params[key] = value
return self

Expand Down Expand Up @@ -652,7 +652,7 @@ def _get_meta_data(collection, name, i):
elif isinstance(collection, dict):
return collection.get(i, None)
else:
raise TypeError('{} should be dict or list'.format(name))
raise TypeError(f"{name} should be dict or list")

if isinstance(eval_set, tuple):
eval_set = [eval_set]
Expand Down Expand Up @@ -720,9 +720,8 @@ def predict(self, X, raw_score=False, start_iteration=0, num_iteration=None,
n_features = X.shape[1]
if self._n_features != n_features:
raise ValueError("Number of features of the model must "
"match the input. Model n_features_ is %s and "
"input n_features is %s "
% (self._n_features, n_features))
f"match the input. Model n_features_ is {self._n_features} and "
f"input n_features is {n_features}")
return self._Booster.predict(X, raw_score=raw_score, start_iteration=start_iteration, num_iteration=num_iteration,
pred_leaf=pred_leaf, pred_contrib=pred_contrib, **kwargs)

Expand Down Expand Up @@ -992,7 +991,7 @@ def fit(self, X, y,
+ _base_doc[_base_doc.find('eval_init_score :'):]) # type: ignore
_base_doc = fit.__doc__
_before_early_stop, _early_stop, _after_early_stop = _base_doc.partition('early_stopping_rounds :')
fit.__doc__ = (_before_early_stop
+ 'eval_at : iterable of int, optional (default=(1, 2, 3, 4, 5))\n'
+ ' ' * 12 + 'The evaluation positions of the specified metric.\n'
+ ' ' * 8 + _early_stop + _after_early_stop)
fit.__doc__ = (f"{_before_early_stop}"
"eval_at : iterable of int, optional (default=(1, 2, 3, 4, 5))\n"
f"{' ':12}The evaluation positions of the specified metric.\n"
f"{' ':8}{_early_stop}{_after_early_stop}")

0 comments on commit 8e126c8

Please sign in to comment.