MAINT: rename build_fn to model in tests (#98)
adriangb authored Oct 10, 2020
1 parent 6b3dd47 commit 5ed2646
Showing 9 changed files with 45 additions and 53 deletions.
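In user-facing terms, the rename swaps the keyword used to hand SciKeras a model-building function. A minimal before/after sketch, using names taken directly from the test diffs below:

    # before this commit
    clf = KerasClassifier(build_fn=build_fn_clf, hidden_dim=5)

    # after this commit
    clf = KerasClassifier(model=build_fn_clf, hidden_dim=5)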
4 changes: 3 additions & 1 deletion scikeras/wrappers.py
@@ -152,8 +152,10 @@ def __init__(
**kwargs,
):

+ # ensure prebuilt model can be serialized
+ if isinstance(model, Model):
+     make_model_picklable(model)
- if isinstance(build_fn, Model):
-     # ensure prebuilt model can be serialized
-     make_model_picklable(build_fn)

# Parse hardcoded params
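For readers unfamiliar with why the wrapper patches prebuilt models: Keras models of this era were not picklable out of the box, so SciKeras patches the instance to round-trip through Keras's own save format. A minimal sketch of the idea, assuming an in-memory HDF5 round-trip via h5py; `make_picklable` and `_unpack_model` are illustrative names, not SciKeras's actual `make_model_picklable` implementation:

    import io

    import h5py
    from tensorflow import keras


    def _unpack_model(raw: bytes) -> keras.Model:
        # Rebuild the model from the in-memory HDF5 bytes produced below.
        with h5py.File(io.BytesIO(raw), mode="r") as f:
            model = keras.models.load_model(f)
        make_picklable(model)  # re-patch so the restored copy stays picklable
        return model


    def make_picklable(model: keras.Model) -> None:
        # pickle looks up __reduce_ex__ on the instance, so patching the
        # instance is enough to customize serialization for this one model.
        def reduce_ex(protocol):
            buf = io.BytesIO()
            with h5py.File(buf, mode="w") as f:
                keras.models.save_model(model, f)
            return (_unpack_model, (buf.getvalue(),))

        model.__reduce_ex__ = reduce_ex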
24 changes: 12 additions & 12 deletions tests/test_api.py
@@ -92,24 +92,24 @@ class TestBasicAPI:

def test_classify_build_fn(self):
"""Tests a classification task for errors."""
- clf = KerasClassifier(build_fn=build_fn_clf, hidden_dim=5)
+ clf = KerasClassifier(model=build_fn_clf, hidden_dim=5)
basic_checks(clf, load_iris)

def test_classify_inherit_class_build_fn(self):
"""Tests for errors using an inherited class."""

- clf = InheritClassBuildFnClf(build_fn=None, hidden_dim=5)
+ clf = InheritClassBuildFnClf(model=None, hidden_dim=5)
basic_checks(clf, load_iris)

def test_regression_build_fn(self):
"""Tests for errors using KerasRegressor."""
- reg = KerasRegressor(build_fn=build_fn_reg, hidden_dim=5)
+ reg = KerasRegressor(model=build_fn_reg, hidden_dim=5)
basic_checks(reg, load_boston)

def test_regression_inherit_class_build_fn(self):
"""Tests for errors using KerasRegressor inherited."""

- reg = InheritClassBuildFnReg(build_fn=None, hidden_dim=5,)
+ reg = InheritClassBuildFnReg(model=None, hidden_dim=5,)
basic_checks(reg, load_boston)


@@ -336,7 +336,7 @@ def test_basic(self, config):
compile_kwargs={"optimizer": "adam", "loss": None, "metrics": None,},
)

- estimator = model(build_fn=keras_model)
+ estimator = model(model=keras_model)
basic_checks(estimator, loader)

@pytest.mark.parametrize("config", ["MLPRegressor", "MLPClassifier"])
@@ -372,7 +372,7 @@ def test_ensemble(self, config):
compile_kwargs={"optimizer": "adam", "loss": None, "metrics": None,},
)

- base_estimator = model(build_fn=keras_model)
+ base_estimator = model(model=keras_model)
for ensemble in ensembles:
estimator = ensemble(base_estimator=base_estimator, n_estimators=2)
basic_checks(estimator, loader)
@@ -385,7 +385,7 @@ def test_warm_start():
X, y = data.data[:100], data.target[:100]
# Initial fit
estimator = KerasRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
loss=KerasRegressor.r_squared,
model__hidden_layer_sizes=(100,),
)
@@ -412,7 +412,7 @@ def test_partial_fit(self):
data = load_boston()
X, y = data.data[:100], data.target[:100]
estimator = KerasRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
loss=KerasRegressor.r_squared,
model__hidden_layer_sizes=[100,],
)
@@ -436,7 +436,7 @@ def test_partial_fit_history_len(self):
data = load_boston()
X, y = data.data[:100], data.target[:100]
estimator = KerasRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
loss=KerasRegressor.r_squared,
metrics="mean_squared_error",
model__hidden_layer_sizes=[100,],
@@ -506,7 +506,7 @@ def test_history():
"""
data = load_boston()
X, y = data.data[:100], data.target[:100]
- estimator = KerasRegressor(build_fn=dynamic_regressor, model__hidden_layer_sizes=[])
+ estimator = KerasRegressor(model=dynamic_regressor, model__hidden_layer_sizes=[])

estimator.partial_fit(X, y)

@@ -540,7 +540,7 @@ def build_fn(compile_with_loss=None):

for loss in losses:
estimator = KerasRegressor(
- build_fn=build_fn,
+ model=build_fn,
loss=loss,
# compile_with_loss=None returns an un-compiled model
compile_with_loss=None,
@@ -550,7 +550,7 @@ def build_fn(compile_with_loss=None):

for myloss in losses:
estimator = KerasRegressor(
- build_fn=build_fn,
+ model=build_fn,
loss="binary_crossentropy",
# compile_with_loss != None overrides loss
compile_with_loss=myloss,
2 changes: 1 addition & 1 deletion tests/test_callbacks.py
@@ -27,7 +27,7 @@ def test_callbacks():
"""Test estimators with callbacks.
"""
estimator = KerasClassifier(
- build_fn=dynamic_classifier,
+ model=dynamic_classifier,
callbacks=(SentinalCallback(),),
optimizer="adam",
model__hidden_layer_sizes=(100,),
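For orientation, the callback under test only needs to record that Keras invoked it, so the test can assert callbacks were routed through to fit(). A hedged sketch of such a sentinel; the real SentinalCallback lives in the test helpers and may differ:

    from tensorflow import keras


    class SentinalCallback(keras.callbacks.Callback):
        """Counts lifecycle calls so a test can assert training ran.

        Illustrative only; not the test suite's actual definition.
        """

        def __init__(self):
            super().__init__()
            self.train_begin_calls = 0

        def on_train_begin(self, logs=None):
            self.train_begin_calls += 1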
6 changes: 3 additions & 3 deletions tests/test_errors.py
@@ -36,7 +36,7 @@ def test_not_fitted_error():
"""Tests error when trying to use predict before fit.
"""
estimator = KerasClassifier(
- build_fn=dynamic_classifier, loss=KerasRegressor.r_squared,
+ model=dynamic_classifier, loss=KerasRegressor.r_squared,
)
X = np.random.rand(10, 20)
with pytest.raises(NotFittedError):
@@ -76,7 +76,7 @@ def _keras_build_fn(self, hidden_layer_sizes=(100,)):
def dummy_func():
return None

- clf = Clf(build_fn=dummy_func,)
+ clf = Clf(model=dummy_func,)

with pytest.raises(ValueError, match="cannot implement `_keras_build_fn`"):
clf.fit(np.array([[0]]), np.array([0]))
@@ -88,7 +88,7 @@ def test_sample_weights_all_zero():
"""
# build estimator
estimator = KerasClassifier(
- build_fn=dynamic_classifier, model__hidden_layer_sizes=(100,),
+ model=dynamic_classifier, model__hidden_layer_sizes=(100,),
)

# we create 20 points
10 changes: 4 additions & 6 deletions tests/test_input_outputs.py
@@ -241,7 +241,7 @@ def build_fn_clf(meta: Dict[str, Any], compile_kwargs: Dict[str, Any],) -> Model
)
return model

- clf = KerasClassifier(build_fn=build_fn_clf)
+ clf = KerasClassifier(model=build_fn_clf)

with pytest.raises(RuntimeError):
clf.fit(X, y)
@@ -289,9 +289,7 @@ def _fit_keras_model(self, X, y, sample_weight, warm_start):
assert sample_weight.dtype == np.dtype(tf.keras.backend.floatx())
return super()._fit_keras_model(X, y, sample_weight, warm_start)

- clf = StrictClassifier(
-     build_fn=dynamic_classifier, model__hidden_layer_sizes=(100,)
- )
+ clf = StrictClassifier(model=dynamic_classifier, model__hidden_layer_sizes=(100,))
clf.fit(X, y, sample_weight=sample_weight)
assert clf.score(X, y) >= 0
if y.dtype.kind != "O":
@@ -324,7 +322,7 @@ def _fit_keras_model(self, X, y, sample_weight, warm_start):
assert sample_weight.dtype == np.dtype(tf.keras.backend.floatx())
return super()._fit_keras_model(X, y, sample_weight, warm_start)

- reg = StrictRegressor(build_fn=dynamic_regressor, model__hidden_layer_sizes=(100,))
+ reg = StrictRegressor(model=dynamic_regressor, model__hidden_layer_sizes=(100,))
reg.fit(X, y, sample_weight=sample_weight)
y_hat = reg.predict(X)
if y.dtype.kind == "f":
@@ -355,7 +353,7 @@ def _fit_keras_model(self, X, y, sample_weight, warm_start):
return super()._fit_keras_model(X, y, sample_weight, warm_start)

reg = StrictRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
run_eagerly=run_eagerly,
model__hidden_layer_sizes=(100,),
)
20 changes: 8 additions & 12 deletions tests/test_param_routing.py
@@ -26,9 +26,7 @@ def test_routing_basic(wrapper, builder):

foo_val = object()

- est = wrapper(
-     build_fn=builder, model__hidden_layer_sizes=(100,), model__foo=foo_val
- )
+ est = wrapper(model=builder, model__hidden_layer_sizes=(100,), model__foo=foo_val)

def build_fn(hidden_layer_sizes, foo, compile_kwargs, params, meta):
assert set(params.keys()) == set(est.get_params().keys())
@@ -46,12 +44,10 @@ def build_fn(hidden_layer_sizes, foo, compile_kwargs, params, meta):
meta=meta,
)

- est = wrapper(
-     build_fn=build_fn, model__hidden_layer_sizes=(100,), model__foo=foo_val
- )
+ est = wrapper(model=build_fn, model__hidden_layer_sizes=(100,), model__foo=foo_val)
est.fit(X, y)

- est = wrapper(build_fn=build_fn, model__hidden_layer_sizes=(100,), foo=foo_val)
+ est = wrapper(model=build_fn, model__hidden_layer_sizes=(100,), foo=foo_val)
est.fit(X, y)


@@ -87,7 +83,7 @@ def build_fn(*args, **kwargs):
kwargs.pop("params") # dynamic_classifier/regressor don't accept it
return builder(*args, **kwargs)

- est = wrapper(build_fn=build_fn, model__hidden_layer_sizes=(100,))
+ est = wrapper(model=build_fn, model__hidden_layer_sizes=(100,))
est.fit(X, y)


@@ -104,7 +100,7 @@ def test_no_extra_meta(wrapper_class, build_fn):
y = np.random.choice(n_classes, size=n).astype(int)

# with user kwargs
- clf = wrapper_class(build_fn=build_fn, model__hidden_layer_sizes=(100,))
+ clf = wrapper_class(model=build_fn, model__hidden_layer_sizes=(100,))
clf.fit(X, y)
assert set(clf.get_meta().keys()) == wrapper_class._meta
# without user kwargs
@@ -113,7 +109,7 @@ def build_fn_no_args(meta, compile_kwargs):
hidden_layer_sizes=(100,), meta=meta, compile_kwargs=compile_kwargs,
)

- clf = wrapper_class(build_fn=build_fn_no_args)
+ clf = wrapper_class(model=build_fn_no_args)
clf.fit(X, y)
assert set(clf.get_meta().keys()) == wrapper_class._meta - {"_user_params"}

@@ -148,8 +144,8 @@ def test_routed_unrouted_equivalence():
X = np.random.uniform(size=(n, d)).astype(float)
y = np.random.choice(n_classes, size=n).astype(int)

- clf = KerasClassifier(build_fn=dynamic_classifier, model__hidden_layer_sizes=(100,))
+ clf = KerasClassifier(model=dynamic_classifier, model__hidden_layer_sizes=(100,))
clf.fit(X, y)

- clf = KerasClassifier(build_fn=dynamic_classifier, hidden_layer_sizes=(100,))
+ clf = KerasClassifier(model=dynamic_classifier, hidden_layer_sizes=(100,))
clf.fit(X, y)
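As these tests exercise, SciKeras routes any constructor argument prefixed with `model__` into the model-building callable, and (per test_routed_unrouted_equivalence above) the unprefixed spelling reaches it too. A hedged sketch of the pattern; the builder body and the meta keys (`n_features_in_`, `n_classes_`) are assumed from SciKeras conventions rather than copied from this diff:

    from scikeras.wrappers import KerasClassifier
    from tensorflow import keras


    def build_clf(hidden_layer_sizes, meta, compile_kwargs):
        # `hidden_layer_sizes` arrives via `model__hidden_layer_sizes` below.
        model = keras.Sequential()
        model.add(keras.Input(shape=(meta["n_features_in_"],)))
        for size in hidden_layer_sizes:
            model.add(keras.layers.Dense(size, activation="relu"))
        model.add(keras.layers.Dense(meta["n_classes_"], activation="softmax"))
        model.compile(loss="sparse_categorical_crossentropy")
        return model


    clf = KerasClassifier(model=build_clf, model__hidden_layer_sizes=(100,))
    # equivalently, unrouted: KerasClassifier(model=build_clf, hidden_layer_sizes=(100,))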
20 changes: 8 additions & 12 deletions tests/test_parameters.py
@@ -29,13 +29,11 @@ class TestRandomState:
"estimator",
[
KerasRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
loss=KerasRegressor.r_squared,
model__hidden_layer_sizes=(100,),
),
- KerasClassifier(
-     build_fn=dynamic_classifier, model__hidden_layer_sizes=(100,)
- ),
+ KerasClassifier(model=dynamic_classifier, model__hidden_layer_sizes=(100,)),
],
)
def test_random_states(self, random_state, estimator):
@@ -72,13 +70,11 @@ def test_random_states(self, random_state, estimator):
"estimator",
[
KerasRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
loss=KerasRegressor.r_squared,
model__hidden_layer_sizes=(100,),
),
- KerasClassifier(
-     build_fn=dynamic_classifier, model__hidden_layer_sizes=(100,)
- ),
+ KerasClassifier(model=dynamic_classifier, model__hidden_layer_sizes=(100,)),
],
)
@pytest.mark.parametrize("pyhash", [None, "0", "1"])
@@ -141,7 +137,7 @@ def test_sample_weights_fit():
"""
# build estimator
estimator = KerasClassifier(
- build_fn=dynamic_classifier,
+ model=dynamic_classifier,
model__hidden_layer_sizes=(100,),
epochs=10,
random_state=0,
@@ -185,7 +181,7 @@ def test_sample_weights_score():
"""
# build estimator
estimator = KerasRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
model__hidden_layer_sizes=(100,),
epochs=10,
random_state=0,
@@ -215,11 +211,11 @@ def test_build_fn_default_params():
"""Tests that default arguments arguments of
`build_fn` are registered as hyperparameters.
"""
- est = KerasClassifier(build_fn=dynamic_classifier, model__hidden_layer_sizes=(100,))
+ est = KerasClassifier(model=dynamic_classifier, model__hidden_layer_sizes=(100,))
params = est.get_params()
# (100, ) is the default for dynamic_classifier
assert params["model__hidden_layer_sizes"] == (100,)

- est = KerasClassifier(build_fn=dynamic_classifier, model__hidden_layer_sizes=(200,))
+ est = KerasClassifier(model=dynamic_classifier, model__hidden_layer_sizes=(200,))
params = est.get_params()
assert params["model__hidden_layer_sizes"] == (200,)
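A practical consequence worth noting: because routed defaults surface in `get_params()`, they can be tuned like any other scikit-learn hyperparameter. A hedged sketch; the grid-search usage is an illustration, not part of this commit:

    from sklearn.model_selection import GridSearchCV

    est = KerasClassifier(model=dynamic_classifier)
    grid = {"model__hidden_layer_sizes": [(50,), (100,)]}
    search = GridSearchCV(est, grid, cv=2)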
4 changes: 2 additions & 2 deletions tests/test_scikit_learn_checks.py
@@ -20,7 +20,7 @@
@parametrize_with_checks(
estimators=[
KerasClassifier(
- build_fn=dynamic_classifier,
+ model=dynamic_classifier,
# Set batch size to a large number
# (larger than X.shape[0] is the goal)
# if batch_size < X.shape[0], results will vary
@@ -32,7 +32,7 @@
model__hidden_layer_sizes=[],
),
KerasRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
# Set batch size to a large number
# (larger than X.shape[0] is the goal)
# if batch_size < X.shape[0], results will vary
8 changes: 4 additions & 4 deletions tests/test_serialization.py
@@ -44,7 +44,7 @@ def test_custom_loss_function():
"""Test that a custom loss function can be serialized.
"""
estimator = KerasRegressor(
- build_fn=dynamic_regressor, loss=CustomLoss(), model__hidden_layer_sizes=(100,),
+ model=dynamic_regressor, loss=CustomLoss(), model__hidden_layer_sizes=(100,),
)
check_pickle(estimator, load_boston)

@@ -77,7 +77,7 @@ class CustomModelRegistered(Model):
def test_custom_model_registered():
"""Test that a registered subclassed Model can be serialized.
"""
- estimator = KerasRegressor(build_fn=build_fn_custom_model_registered)
+ estimator = KerasRegressor(model=build_fn_custom_model_registered)
check_pickle(estimator, load_boston)


@@ -105,7 +105,7 @@ class CustomModelUnregistered(Model):
def test_custom_model_unregistered():
"""Test that an unregistered subclassed Model raises an error.
"""
- estimator = KerasRegressor(build_fn=build_fn_custom_model_unregistered)
+ estimator = KerasRegressor(model=build_fn_custom_model_unregistered)
with pytest.raises(ValueError, match="Unknown layer"):
check_pickle(estimator, load_boston)

@@ -117,7 +117,7 @@ def test_run_eagerly():
"""Test that models compiled with run_eagerly can be serialized.
"""
estimator = KerasRegressor(
- build_fn=dynamic_regressor,
+ model=dynamic_regressor,
run_eagerly=True,
loss=KerasRegressor.r_squared,
model__hidden_layer_sizes=(100,),
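On the registered/unregistered distinction above: Keras can only reconstruct a subclassed Model if its class can be resolved through the serialization registry, which is what makes the registered variant pickle-safe while the unregistered one raises "Unknown layer". A hedged sketch of the registration step; the builder body and its signature are assumptions, not copied from the test helpers:

    from tensorflow import keras


    @keras.utils.register_keras_serializable(package="my_tests")
    class CustomModelRegistered(keras.Model):
        """Subclassed Model that Keras can resolve by name on reload."""


    def build_fn_custom_model_registered(meta, compile_kwargs):
        # Hypothetical builder; the real one lives in tests/test_serialization.py.
        inputs = keras.Input(shape=(meta["n_features_in_"],))
        outputs = keras.layers.Dense(1)(inputs)
        model = CustomModelRegistered(inputs=inputs, outputs=outputs)
        model.compile(loss="mse")
        return model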
