From f75b0996e2d5944e52cc972e48bd3a2241428d92 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Thu, 9 Apr 2020 19:58:59 +0000 Subject: [PATCH 01/23] make Learner1D picklable --- adaptive/learner/learner1D.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/adaptive/learner/learner1D.py b/adaptive/learner/learner1D.py index e6ace878c..43b1bf5cc 100644 --- a/adaptive/learner/learner1D.py +++ b/adaptive/learner/learner1D.py @@ -625,6 +625,19 @@ def _set_data(self, data): if data: self.tell_many(*zip(*data.items())) + def __getstate__(self): + return ( + self.function, + self.bounds, + self.loss_per_interval, + self._get_data(), + ) + + def __setstate__(self, state): + function, bounds, loss_per_interval, data = state + self.__init__(function, bounds, loss_per_interval) + self._set_data(data) + def loss_manager(x_scale): def sort_key(ival, loss): From 77f3613f8a480f7bf539e304ac1b003a9bcfa233 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Thu, 9 Apr 2020 22:25:02 +0200 Subject: [PATCH 02/23] make Learner2D picklable --- adaptive/learner/learner2D.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py index 5e322b5b2..80361284a 100644 --- a/adaptive/learner/learner2D.py +++ b/adaptive/learner/learner2D.py @@ -706,3 +706,16 @@ def _set_data(self, data): for point in copy(self._stack): if point in self.data: self._stack.pop(point) + + def __getstate__(self): + return ( + self.function, + self.bounds, + self.loss_per_triangle, + self._get_data(), + ) + + def __setstate__(self, state): + function, bounds, loss_per_triangle, data = state + self.__init__(function, bounds, loss_per_triangle) + self._set_data(data) From 06c1dd20006d7a4861d4349edd8a1139d04341ae Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Thu, 9 Apr 2020 22:28:58 +0200 Subject: [PATCH 03/23] make AverageLearner picklable --- adaptive/learner/average_learner.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff 
--git a/adaptive/learner/average_learner.py b/adaptive/learner/average_learner.py index 479538359..e2c9f476a 100644 --- a/adaptive/learner/average_learner.py +++ b/adaptive/learner/average_learner.py @@ -144,3 +144,16 @@ def _get_data(self): def _set_data(self, data): self.data, self.npoints, self.sum_f, self.sum_f_sq = data + + def __getstate__(self): + return ( + self.function, + self.atol, + self.rtol, + self._get_data(), + ) + + def __setstate__(self, state): + function, atol, rtol, data = state + self.__init__(function, atol, rtol) + self._set_data(data) From 062c2f7ddc42370fd3b66a5f730fd360e66ee176 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Thu, 9 Apr 2020 22:33:29 +0200 Subject: [PATCH 04/23] make IntegratorLearner picklable --- adaptive/learner/integrator_learner.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/adaptive/learner/integrator_learner.py b/adaptive/learner/integrator_learner.py index 9c0aeb008..e60f23383 100644 --- a/adaptive/learner/integrator_learner.py +++ b/adaptive/learner/integrator_learner.py @@ -591,3 +591,16 @@ def _set_data(self, data): self.x_mapping = defaultdict(lambda: SortedSet([], key=attrgetter("rdepth"))) for k, _set in x_mapping.items(): self.x_mapping[k].update(_set) + + def __getstate__(self): + return ( + self.function, + self.bounds, + self.tol, + self._get_data(), + ) + + def __setstate__(self, state): + function, bounds, tol, data = state + self.__init__(function, bounds, tol) + self._set_data(data) From cf7dad4656fcf4f1553eca93cdf21097c5f2a0ff Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Thu, 9 Apr 2020 22:34:35 +0200 Subject: [PATCH 05/23] make SequenceLearner picklable --- adaptive/learner/sequence_learner.py | 32 +++++++++++++++++++--------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/adaptive/learner/sequence_learner.py b/adaptive/learner/sequence_learner.py index c7398dfa4..a9d517a8b 100644 --- a/adaptive/learner/sequence_learner.py +++ 
b/adaptive/learner/sequence_learner.py @@ -83,16 +83,6 @@ def ask(self, n, tell_pending=True): return points, loss_improvements - def _get_data(self): - return self.data - - def _set_data(self, data): - if data: - indices, values = zip(*data.items()) - # the points aren't used by tell, so we can safely pass None - points = [(i, None) for i in indices] - self.tell_many(points, values) - def loss(self, real=True): if not (self._to_do_indices or self.pending_points): return 0 @@ -128,3 +118,25 @@ def result(self): @property def npoints(self): return len(self.data) + + def _get_data(self): + return self.data + + def _set_data(self, data): + if data: + indices, values = zip(*data.items()) + # the points aren't used by tell, so we can safely pass None + points = [(i, None) for i in indices] + self.tell_many(points, values) + + def __getstate__(self): + return ( + self._original_function, + self.sequence, + self._get_data(), + ) + + def __setstate__(self, state): + function, sequence, data = state + self.__init__(function, sequence) + self._set_data(data) From b71a058d71d5183b9752efb8edb76450473b6077 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Thu, 9 Apr 2020 22:37:47 +0200 Subject: [PATCH 06/23] make BalancingLearner picklable --- adaptive/learner/balancing_learner.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/adaptive/learner/balancing_learner.py b/adaptive/learner/balancing_learner.py index ce40afb22..6523d7eb4 100644 --- a/adaptive/learner/balancing_learner.py +++ b/adaptive/learner/balancing_learner.py @@ -440,3 +440,16 @@ def _get_data(self): def _set_data(self, data): for l, _data in zip(self.learners, data): l._set_data(_data) + + def __getstate__(self): + return ( + self.learners, + self._cdims_default, + self.strategy, + self._get_data(), + ) + + def __setstate__(self, state): + learners, cdims, strategy, data = state + self.__init__(learners, cdims=cdims, strategy=strategy) + self._set_data(data) From 
7ff01d2f5b52caf100cd15897070275b53de9f5a Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Thu, 9 Apr 2020 22:39:48 +0200 Subject: [PATCH 07/23] make DataSaver picklable --- adaptive/learner/data_saver.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/adaptive/learner/data_saver.py b/adaptive/learner/data_saver.py index 14e246184..3b2561057 100644 --- a/adaptive/learner/data_saver.py +++ b/adaptive/learner/data_saver.py @@ -51,6 +51,18 @@ def _set_data(self, data): learner_data, self.extra_data = data self.learner._set_data(learner_data) + def __getstate__(self): + return ( + self.learner, + self.arg_picker, + self._get_data(), + ) + + def __setstate__(self, state): + learner, arg_picker, data = state + self.__init__(learner, arg_picker) + self._set_data(data) + @copy_docstring_from(BaseLearner.save) def save(self, fname, compress=True): # We copy this method because the 'DataSaver' is not a From 99fadf19a05fa915de0cca5ef347e41ffb46f474 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 10 Apr 2020 14:23:07 +0200 Subject: [PATCH 08/23] add tests for pickling --- adaptive/tests/test_pickling.py | 103 ++++++++++++++++++++++++++++++++ tox.ini | 2 +- 2 files changed, 104 insertions(+), 1 deletion(-) create mode 100644 adaptive/tests/test_pickling.py diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py new file mode 100644 index 000000000..47ab68936 --- /dev/null +++ b/adaptive/tests/test_pickling.py @@ -0,0 +1,103 @@ +import random + +import cloudpickle +import pytest + +from adaptive.learner import ( + AverageLearner, + BalancingLearner, + DataSaver, + IntegratorLearner, + Learner1D, + Learner2D, + LearnerND, + SequenceLearner, +) +from adaptive.runner import simple + + +def goal_1(learner): + return learner.npoints >= 10 + + +def goal_2(learner): + return learner.npoints >= 20 + + +@pytest.mark.parametrize( + "learner_type, learner_kwargs", + [ + (Learner1D, dict(bounds=(-1, 1))), + (Learner2D, dict(bounds=[(-1, 1), (-1, 
1)])), + (LearnerND, dict(bounds=[(-1, 1), (-1, 1), (-1, 1)])), + (SequenceLearner, dict(sequence=list(range(100)))), + (IntegratorLearner, dict(bounds=(0, 1), tol=1e-3)), + (AverageLearner, dict(atol=0.1)), + ], +) +def test_cloudpickle_for(learner_type, learner_kwargs): + """Test serializing a learner using cloudpickle. + + We use cloudpickle because with pickle the functions are only + pickled by reference.""" + + def f(x): + return random.random() + + learner = learner_type(f, **learner_kwargs) + + simple(learner, goal_1) + learner_bytes = cloudpickle.dumps(learner) + + # Delete references + del f + del learner + + learner_loaded = cloudpickle.loads(learner_bytes) + assert learner_loaded.npoints >= 10 + simple(learner_loaded, goal_2) + assert learner_loaded.npoints >= 20 + + +def test_cloudpickle_for_datasaver(): + def f(x): + return dict(x=1, y=x ** 2) + + _learner = Learner1D(f, bounds=(-1, 1)) + learner = DataSaver(_learner, arg_picker=lambda x: x["y"]) + + simple(learner, goal_1) + learner_bytes = cloudpickle.dumps(learner) + + # Delete references + del f + del _learner + del learner + + learner_loaded = cloudpickle.loads(learner_bytes) + assert learner_loaded.npoints >= 10 + simple(learner_loaded, goal_2) + assert learner_loaded.npoints >= 20 + + +def test_cloudpickle_for_balancing_learner(): + def f(x): + return x ** 2 + + learner_1 = Learner1D(f, bounds=(-1, 1)) + learner_2 = Learner1D(f, bounds=(-2, 2)) + learner = BalancingLearner([learner_1, learner_2]) + + simple(learner, goal_1) + learner_bytes = cloudpickle.dumps(learner) + + # Delete references + del f + del learner_1 + del learner_2 + del learner + + learner_loaded = cloudpickle.loads(learner_bytes) + assert learner_loaded.npoints >= 10 + simple(learner_loaded, goal_2) + assert learner_loaded.npoints >= 20 diff --git a/tox.ini b/tox.ini index 028c5c40d..3c4e58d58 100644 --- a/tox.ini +++ b/tox.ini @@ -65,4 +65,4 @@ include_trailing_comma=True force_grid_wrap=0 use_parentheses=True line_length=88 
-known_third_party=PIL,atomicwrites,flaky,holoviews,matplotlib,nbconvert,numpy,pytest,scipy,setuptools,skopt,sortedcollections,sortedcontainers +known_third_party=PIL,atomicwrites,cloudpickle,flaky,holoviews,matplotlib,nbconvert,numpy,pytest,scipy,setuptools,skopt,sortedcollections,sortedcontainers From 583159288263a76053c61205f648a55317ff9d6a Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 10 Apr 2020 14:26:30 +0200 Subject: [PATCH 09/23] add cloudpickle to testing dependencies --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6f26cff50..9b8ea6f90 100644 --- a/setup.py +++ b/setup.py @@ -43,6 +43,7 @@ def get_version_and_cmdclass(package_name): "plotly", ], "testing": [ + "cloudpickle", "flaky", "pytest", "pytest-cov", @@ -51,8 +52,8 @@ def get_version_and_cmdclass(package_name): "pre_commit", ], "other": [ - "ipyparallel>=6.2.5", # because of https://github.com/ipython/ipyparallel/issues/404 "distributed", + "ipyparallel>=6.2.5", # because of https://github.com/ipython/ipyparallel/issues/404 "loky", "scikit-optimize", "wexpect" if os.name == "nt" else "pexpect", From dfd8b0c4366cb99a8bac024e4d0068af6da872e3 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 10 Apr 2020 15:18:14 +0200 Subject: [PATCH 10/23] test serialization with pickle, cloudpickle, and dill --- adaptive/tests/test_pickling.py | 101 +++++++++++++++++++++----------- setup.py | 1 + tox.ini | 2 +- 3 files changed, 70 insertions(+), 34 deletions(-) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 47ab68936..2fda61df8 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -1,6 +1,9 @@ +import operator +import pickle import random import cloudpickle +import dill import pytest from adaptive.learner import ( @@ -24,22 +27,37 @@ def goal_2(learner): return learner.npoints >= 20 +learners_pairs = [ + (Learner1D, dict(bounds=(-1, 1))), + (Learner2D, 
dict(bounds=[(-1, 1), (-1, 1)])), + (LearnerND, dict(bounds=[(-1, 1), (-1, 1), (-1, 1)])), + (SequenceLearner, dict(sequence=list(range(100)))), + (IntegratorLearner, dict(bounds=(0, 1), tol=1e-3)), + (AverageLearner, dict(atol=0.1)), +] + +serializers = (pickle, dill, cloudpickle) + +learners = [ + (learner_type, learner_kwargs, serializer) + for serializer in serializers + for learner_type, learner_kwargs in learners_pairs +] + + +def f_for_pickle_balancing_learner(x): + return 1 + + +def f_for_pickle_datasaver(x): + return dict(x=x, y=x) + + @pytest.mark.parametrize( - "learner_type, learner_kwargs", - [ - (Learner1D, dict(bounds=(-1, 1))), - (Learner2D, dict(bounds=[(-1, 1), (-1, 1)])), - (LearnerND, dict(bounds=[(-1, 1), (-1, 1), (-1, 1)])), - (SequenceLearner, dict(sequence=list(range(100)))), - (IntegratorLearner, dict(bounds=(0, 1), tol=1e-3)), - (AverageLearner, dict(atol=0.1)), - ], + "learner_type, learner_kwargs, serializer", learners, ) -def test_cloudpickle_for(learner_type, learner_kwargs): - """Test serializing a learner using cloudpickle. 
- - We use cloudpickle because with pickle the functions are only - pickled by reference.""" +def test_serialization_for(learner_type, learner_kwargs, serializer): + """Test serializing a learner using different serializers.""" def f(x): return random.random() @@ -49,9 +67,10 @@ def f(x): simple(learner, goal_1) learner_bytes = cloudpickle.dumps(learner) - # Delete references - del f - del learner + if serializer is not pickle: + # With pickle the functions are only pickled by reference + del f + del learner learner_loaded = cloudpickle.loads(learner_bytes) assert learner_loaded.npoints >= 10 @@ -59,45 +78,61 @@ def f(x): assert learner_loaded.npoints >= 20 -def test_cloudpickle_for_datasaver(): +@pytest.mark.parametrize( + "serializer", serializers, +) +def test_serialization_for_datasaver(serializer): def f(x): return dict(x=1, y=x ** 2) + if serializer is pickle: + # f from the local scope cannot be pickled + f = f_for_pickle_datasaver # noqa: F811 + _learner = Learner1D(f, bounds=(-1, 1)) - learner = DataSaver(_learner, arg_picker=lambda x: x["y"]) + learner = DataSaver(_learner, arg_picker=operator.itemgetter("y")) simple(learner, goal_1) - learner_bytes = cloudpickle.dumps(learner) + learner_bytes = serializer.dumps(learner) - # Delete references - del f - del _learner - del learner + if serializer is not pickle: + # With pickle the functions are only pickled by reference + del f + del _learner + del learner - learner_loaded = cloudpickle.loads(learner_bytes) + learner_loaded = serializer.loads(learner_bytes) assert learner_loaded.npoints >= 10 simple(learner_loaded, goal_2) assert learner_loaded.npoints >= 20 -def test_cloudpickle_for_balancing_learner(): +@pytest.mark.parametrize( + "serializer", serializers, +) +def test_serialization_for_balancing_learner(serializer): def f(x): return x ** 2 + if serializer is pickle: + # f from the local scope cannot be pickled + f = f_for_pickle_balancing_learner # noqa: F811 + learner_1 = Learner1D(f, bounds=(-1, 1)) 
learner_2 = Learner1D(f, bounds=(-2, 2)) learner = BalancingLearner([learner_1, learner_2]) simple(learner, goal_1) - learner_bytes = cloudpickle.dumps(learner) + learner_bytes = serializer.dumps(learner) - # Delete references - del f - del learner_1 - del learner_2 - del learner + if serializer is not pickle: + # With pickle the functions are only pickled by reference + del f + del learner_1 + del learner_2 + del learner - learner_loaded = cloudpickle.loads(learner_bytes) + learner_loaded = serializer.loads(learner_bytes) assert learner_loaded.npoints >= 10 simple(learner_loaded, goal_2) assert learner_loaded.npoints >= 20 diff --git a/setup.py b/setup.py index 9b8ea6f90..641709a63 100644 --- a/setup.py +++ b/setup.py @@ -44,6 +44,7 @@ def get_version_and_cmdclass(package_name): ], "testing": [ "cloudpickle", + "dill", "flaky", "pytest", "pytest-cov", diff --git a/tox.ini b/tox.ini index 3c4e58d58..afdbd64ac 100644 --- a/tox.ini +++ b/tox.ini @@ -65,4 +65,4 @@ include_trailing_comma=True force_grid_wrap=0 use_parentheses=True line_length=88 -known_third_party=PIL,atomicwrites,cloudpickle,flaky,holoviews,matplotlib,nbconvert,numpy,pytest,scipy,setuptools,skopt,sortedcollections,sortedcontainers +known_third_party=PIL,atomicwrites,cloudpickle,dill,flaky,holoviews,matplotlib,nbconvert,numpy,pytest,scipy,setuptools,skopt,sortedcollections,sortedcontainers From d6172e0c018fdf076a39a7cfcb1b56af4153985d Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Fri, 10 Apr 2020 16:22:24 +0200 Subject: [PATCH 11/23] only test cloudpickle and dill if installed --- adaptive/tests/test_pickling.py | 34 ++++++++++++++++++++++++++------- setup.py | 4 ++-- tox.ini | 2 +- 3 files changed, 30 insertions(+), 10 deletions(-) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 2fda61df8..b23489b71 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -2,8 +2,6 @@ import pickle import random -import cloudpickle -import dill 
import pytest from adaptive.learner import ( @@ -18,6 +16,20 @@ ) from adaptive.runner import simple +try: + import cloudpickle + + with_cloudpickle = True +except ModuleNotFoundError: + with_cloudpickle = False + +try: + import dill + + with_dill = True +except ModuleNotFoundError: + with_dill = False + def goal_1(learner): return learner.npoints >= 10 @@ -36,7 +48,11 @@ def goal_2(learner): (AverageLearner, dict(atol=0.1)), ] -serializers = (pickle, dill, cloudpickle) +serializers = [pickle] +if with_cloudpickle: + serializers.append(cloudpickle) +if with_dill: + serializers.append(dill) learners = [ (learner_type, learner_kwargs, serializer) @@ -45,7 +61,7 @@ def goal_2(learner): ] -def f_for_pickle_balancing_learner(x): +def f_for_pickle(x): return 1 @@ -62,17 +78,21 @@ def test_serialization_for(learner_type, learner_kwargs, serializer): def f(x): return random.random() + if serializer is pickle: + # f from the local scope cannot be pickled + f = f_for_pickle # noqa: F811 + learner = learner_type(f, **learner_kwargs) simple(learner, goal_1) - learner_bytes = cloudpickle.dumps(learner) + learner_bytes = serializer.dumps(learner) if serializer is not pickle: # With pickle the functions are only pickled by reference del f del learner - learner_loaded = cloudpickle.loads(learner_bytes) + learner_loaded = serializer.loads(learner_bytes) assert learner_loaded.npoints >= 10 simple(learner_loaded, goal_2) assert learner_loaded.npoints >= 20 @@ -116,7 +136,7 @@ def f(x): if serializer is pickle: # f from the local scope cannot be pickled - f = f_for_pickle_balancing_learner # noqa: F811 + f = f_for_pickle # noqa: F811 learner_1 = Learner1D(f, bounds=(-1, 1)) learner_2 = Learner1D(f, bounds=(-2, 2)) diff --git a/setup.py b/setup.py index 641709a63..2807428bb 100644 --- a/setup.py +++ b/setup.py @@ -43,8 +43,6 @@ def get_version_and_cmdclass(package_name): "plotly", ], "testing": [ - "cloudpickle", - "dill", "flaky", "pytest", "pytest-cov", @@ -53,6 +51,8 @@ def 
get_version_and_cmdclass(package_name): "pre_commit", ], "other": [ + "cloudpickle", + "dill", "distributed", "ipyparallel>=6.2.5", # because of https://github.com/ipython/ipyparallel/issues/404 "loky", diff --git a/tox.ini b/tox.ini index afdbd64ac..028c5c40d 100644 --- a/tox.ini +++ b/tox.ini @@ -65,4 +65,4 @@ include_trailing_comma=True force_grid_wrap=0 use_parentheses=True line_length=88 -known_third_party=PIL,atomicwrites,cloudpickle,dill,flaky,holoviews,matplotlib,nbconvert,numpy,pytest,scipy,setuptools,skopt,sortedcollections,sortedcontainers +known_third_party=PIL,atomicwrites,flaky,holoviews,matplotlib,nbconvert,numpy,pytest,scipy,setuptools,skopt,sortedcollections,sortedcontainers From 64bb2e6c254d2188d73c258c59df728ae6b5e41b Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sun, 12 Apr 2020 13:42:01 +0200 Subject: [PATCH 12/23] test for identical ask and loss response --- adaptive/tests/test_pickling.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index b23489b71..37a92c5f8 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -86,6 +86,8 @@ def f(x): simple(learner, goal_1) learner_bytes = serializer.dumps(learner) + loss = learner.loss() + asked = learner.ask(1) if serializer is not pickle: # With pickle the functions are only pickled by reference @@ -94,6 +96,15 @@ def f(x): learner_loaded = serializer.loads(learner_bytes) assert learner_loaded.npoints >= 10 + assert loss == learner_loaded.loss() + + if learner_type is not Learner2D: + # cannot test this for Learner2D because + # xfailing test_point_adding_order_is_irrelevant + assert asked == learner_loaded.ask(1) + # load again to undo the ask + learner_loaded = serializer.loads(learner_bytes) + simple(learner_loaded, goal_2) assert learner_loaded.npoints >= 20 From 7bc0ade5883dc972a41f35e1b72887a7c9f627bf Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sun, 12 Apr 2020
13:42:31 +0200 Subject: [PATCH 13/23] add flaky --- adaptive/tests/test_pickling.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 37a92c5f8..861c40b33 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -2,6 +2,7 @@ import pickle import random +import flaky import pytest from adaptive.learner import ( @@ -69,6 +70,7 @@ def f_for_pickle_datasaver(x): return dict(x=x, y=x) +@flaky.flaky(max_runs=3) @pytest.mark.parametrize( "learner_type, learner_kwargs, serializer", learners, ) From 978a62cef9128006c51299eecdfc684d28e7b8fc Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Sun, 12 Apr 2020 14:21:25 +0200 Subject: [PATCH 14/23] use an exact equality in checking the number of points --- adaptive/tests/test_pickling.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 861c40b33..572ae4efb 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -33,11 +33,11 @@ def goal_1(learner): - return learner.npoints >= 10 + return learner.npoints == 10 def goal_2(learner): - return learner.npoints >= 20 + return learner.npoints == 20 learners_pairs = [ @@ -97,7 +97,7 @@ def f(x): del learner learner_loaded = serializer.loads(learner_bytes) - assert learner_loaded.npoints >= 10 + assert learner_loaded.npoints == 10 assert loss == learner_loaded.loss() if learner_type is not Learner2D: @@ -108,7 +108,7 @@ def f(x): learner_loaded = serializer.loads(learner_bytes) simple(learner_loaded, goal_2) - assert learner_loaded.npoints >= 20 + assert learner_loaded.npoints == 20 @pytest.mark.parametrize( From 1a3166954cf00eb32b6243de62417160ad9b0308 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Mon, 13 Apr 2020 22:55:53 +0200 Subject: [PATCH 15/23] set learner._recompute_losses_factor = 1 --- adaptive/tests/test_pickling.py | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 572ae4efb..3b9b57c74 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -85,6 +85,8 @@ def f(x): f = f_for_pickle # noqa: F811 learner = learner_type(f, **learner_kwargs) + if learner_type is Learner1D: + learner._recompute_losses_factor = 1 simple(learner, goal_1) learner_bytes = serializer.dumps(learner) From 2e40ebb4a17ba171fc8c621161e31ed690ad44b6 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Mon, 13 Apr 2020 22:56:24 +0200 Subject: [PATCH 16/23] use exact equalities --- adaptive/tests/test_pickling.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 3b9b57c74..74cc90e37 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -137,9 +137,9 @@ def f(x): del learner learner_loaded = serializer.loads(learner_bytes) - assert learner_loaded.npoints >= 10 + assert learner_loaded.npoints == 10 simple(learner_loaded, goal_2) - assert learner_loaded.npoints >= 20 + assert learner_loaded.npoints == 20 @pytest.mark.parametrize( @@ -168,6 +168,6 @@ def f(x): del learner learner_loaded = serializer.loads(learner_bytes) - assert learner_loaded.npoints >= 10 + assert learner_loaded.npoints == 10 simple(learner_loaded, goal_2) - assert learner_loaded.npoints >= 20 + assert learner_loaded.npoints == 20 From 44c6446dea8473068b6dc33d657653e756b514b8 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Tue, 14 Apr 2020 11:41:51 +0200 Subject: [PATCH 17/23] make Learner1D's datastructures identical before and after pickling --- adaptive/learner/learner1D.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/adaptive/learner/learner1D.py b/adaptive/learner/learner1D.py index 43b1bf5cc..0bdb4a451 100644 --- a/adaptive/learner/learner1D.py +++ b/adaptive/learner/learner1D.py @@ -630,13 +630,17 @@ def 
__getstate__(self): self.function, self.bounds, self.loss_per_interval, + dict(self.losses), # SortedDict cannot be pickled + dict(self.losses_combined), # ItemSortedDict cannot be pickled self._get_data(), ) def __setstate__(self, state): - function, bounds, loss_per_interval, data = state + function, bounds, loss_per_interval, losses, losses_combined, data = state self.__init__(function, bounds, loss_per_interval) self._set_data(data) + self.losses.update(losses) + self.losses_combined.update(losses_combined) def loss_manager(x_scale): From f7a3b036a66d50830e301d38c54efc330868cd22 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Tue, 14 Apr 2020 11:42:53 +0200 Subject: [PATCH 18/23] make Learner2D's datastructures identical before and after pickling --- adaptive/learner/learner2D.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py index 80361284a..955bd43af 100644 --- a/adaptive/learner/learner2D.py +++ b/adaptive/learner/learner2D.py @@ -712,10 +712,12 @@ def __getstate__(self): self.function, self.bounds, self.loss_per_triangle, + self._stack, self._get_data(), ) def __setstate__(self, state): - function, bounds, loss_per_triangle, data = state + function, bounds, loss_per_triangle, _stack, data = state self.__init__(function, bounds, loss_per_triangle) self._set_data(data) + self._stack = _stack From 30619d53e3712cfcb55e934c3b75780470b1a5a2 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Tue, 14 Apr 2020 11:44:59 +0200 Subject: [PATCH 19/23] do not specially treat Learner1D and Learner2D --- adaptive/tests/test_pickling.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 74cc90e37..4564e83d5 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -2,7 +2,6 @@ import pickle import random -import flaky import pytest from adaptive.learner import ( @@ 
-70,7 +69,6 @@ def f_for_pickle_datasaver(x): return dict(x=x, y=x) -@flaky.flaky(max_runs=3) @pytest.mark.parametrize( "learner_type, learner_kwargs, serializer", learners, ) @@ -85,8 +83,6 @@ def f(x): f = f_for_pickle # noqa: F811 learner = learner_type(f, **learner_kwargs) - if learner_type is Learner1D: - learner._recompute_losses_factor = 1 simple(learner, goal_1) learner_bytes = serializer.dumps(learner) @@ -102,12 +98,9 @@ def f(x): assert learner_loaded.npoints == 10 assert loss == learner_loaded.loss() - if learner_type is not Learner2D: - # cannot test this for Learner2D because - # xfailing test_point_adding_order_is_irrelevant - assert asked == learner_loaded.ask(1) - # load again to undo the ask - learner_loaded = serializer.loads(learner_bytes) + assert asked == learner_loaded.ask(1) + # load again to undo the ask + learner_loaded = serializer.loads(learner_bytes) simple(learner_loaded, goal_2) assert learner_loaded.npoints == 20 From 9dccd055d327d61054e7d241961a7989f30edd2c Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Tue, 14 Apr 2020 11:49:27 +0200 Subject: [PATCH 20/23] test for more points --- adaptive/tests/test_pickling.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 4564e83d5..73efa6c84 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -87,7 +87,8 @@ def f(x): simple(learner, goal_1) learner_bytes = serializer.dumps(learner) loss = learner.loss() - asked = learner.ask(1) + asked = learner.ask(10) + data = learner.data if serializer is not pickle: # With pickle the functions are only pickled by reference @@ -97,8 +98,10 @@ def f(x): learner_loaded = serializer.loads(learner_bytes) assert learner_loaded.npoints == 10 assert loss == learner_loaded.loss() + assert data == learner_loaded.data + + assert asked == learner_loaded.ask(10) - assert asked == learner_loaded.ask(1) # load again to undo the ask 
learner_loaded = serializer.loads(learner_bytes) From 1e4c495ab82ef419a5ad524c1a9703724a65c4f0 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Wed, 15 Apr 2020 23:06:17 +0200 Subject: [PATCH 21/23] refactor tests --- adaptive/tests/test_pickling.py | 128 ++++++++++---------------------- 1 file changed, 38 insertions(+), 90 deletions(-) diff --git a/adaptive/tests/test_pickling.py b/adaptive/tests/test_pickling.py index 73efa6c84..d648c90e0 100644 --- a/adaptive/tests/test_pickling.py +++ b/adaptive/tests/test_pickling.py @@ -1,6 +1,4 @@ -import operator import pickle -import random import pytest @@ -11,7 +9,6 @@ IntegratorLearner, Learner1D, Learner2D, - LearnerND, SequenceLearner, ) from adaptive.runner import simple @@ -39,49 +36,62 @@ def goal_2(learner): return learner.npoints == 20 +def pickleable_f(x): + return hash(str(x)) / 2 ** 63 + + +nonpickleable_f = lambda x: hash(str(x)) / 2 ** 63 # noqa: E731 + + +def identity_function(x): + return x + + +def datasaver(f, learner_type, learner_kwargs): + return DataSaver( + learner=learner_type(f, **learner_kwargs), arg_picker=identity_function + ) + + +def balancing_learner(f, learner_type, learner_kwargs): + learner_1 = learner_type(f, **learner_kwargs) + learner_2 = learner_type(f, **learner_kwargs) + return BalancingLearner([learner_1, learner_2]) + + learners_pairs = [ (Learner1D, dict(bounds=(-1, 1))), (Learner2D, dict(bounds=[(-1, 1), (-1, 1)])), - (LearnerND, dict(bounds=[(-1, 1), (-1, 1), (-1, 1)])), (SequenceLearner, dict(sequence=list(range(100)))), (IntegratorLearner, dict(bounds=(0, 1), tol=1e-3)), (AverageLearner, dict(atol=0.1)), + (datasaver, dict(learner_type=Learner1D, learner_kwargs=dict(bounds=(-1, 1)))), + ( + balancing_learner, + dict(learner_type=Learner1D, learner_kwargs=dict(bounds=(-1, 1))), + ), ] -serializers = [pickle] +serializers = [(pickle, pickleable_f)] if with_cloudpickle: - serializers.append(cloudpickle) + serializers.append((cloudpickle, nonpickleable_f)) if with_dill: - 
serializers.append(dill) + serializers.append((dill, nonpickleable_f)) + learners = [ - (learner_type, learner_kwargs, serializer) - for serializer in serializers + (learner_type, learner_kwargs, serializer, f) + for serializer, f in serializers for learner_type, learner_kwargs in learners_pairs ] -def f_for_pickle(x): - return 1 - - -def f_for_pickle_datasaver(x): - return dict(x=x, y=x) - - @pytest.mark.parametrize( - "learner_type, learner_kwargs, serializer", learners, + "learner_type, learner_kwargs, serializer, f", learners, ) -def test_serialization_for(learner_type, learner_kwargs, serializer): +def test_serialization_for(learner_type, learner_kwargs, serializer, f): """Test serializing a learner using different serializers.""" - def f(x): - return random.random() - - if serializer is pickle: - # f from the local scope cannot be pickled - f = f_for_pickle # noqa: F811 - learner = learner_type(f, **learner_kwargs) simple(learner, goal_1) @@ -90,10 +100,8 @@ def f(x): asked = learner.ask(10) data = learner.data - if serializer is not pickle: - # With pickle the functions are only pickled by reference - del f - del learner + del f + del learner learner_loaded = serializer.loads(learner_bytes) assert learner_loaded.npoints == 10 @@ -107,63 +115,3 @@ def f(x): simple(learner_loaded, goal_2) assert learner_loaded.npoints == 20 - - -@pytest.mark.parametrize( - "serializer", serializers, -) -def test_serialization_for_datasaver(serializer): - def f(x): - return dict(x=1, y=x ** 2) - - if serializer is pickle: - # f from the local scope cannot be pickled - f = f_for_pickle_datasaver # noqa: F811 - - _learner = Learner1D(f, bounds=(-1, 1)) - learner = DataSaver(_learner, arg_picker=operator.itemgetter("y")) - - simple(learner, goal_1) - learner_bytes = serializer.dumps(learner) - - if serializer is not pickle: - # With pickle the functions are only pickled by reference - del f - del _learner - del learner - - learner_loaded = serializer.loads(learner_bytes) - assert 
learner_loaded.npoints == 10 - simple(learner_loaded, goal_2) - assert learner_loaded.npoints == 20 - - -@pytest.mark.parametrize( - "serializer", serializers, -) -def test_serialization_for_balancing_learner(serializer): - def f(x): - return x ** 2 - - if serializer is pickle: - # f from the local scope cannot be pickled - f = f_for_pickle # noqa: F811 - - learner_1 = Learner1D(f, bounds=(-1, 1)) - learner_2 = Learner1D(f, bounds=(-2, 2)) - learner = BalancingLearner([learner_1, learner_2]) - - simple(learner, goal_1) - learner_bytes = serializer.dumps(learner) - - if serializer is not pickle: - # With pickle the functions are only pickled by reference - del f - del learner_1 - del learner_2 - del learner - - learner_loaded = serializer.loads(learner_bytes) - assert learner_loaded.npoints == 10 - simple(learner_loaded, goal_2) - assert learner_loaded.npoints == 20 From ca28f2e22592a29b5bb155f791e834db95a45e95 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Wed, 15 Apr 2020 23:06:54 +0200 Subject: [PATCH 22/23] do not initialize child-learners twice in BalancingLearner --- adaptive/learner/balancing_learner.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/adaptive/learner/balancing_learner.py b/adaptive/learner/balancing_learner.py index 6523d7eb4..c7cc79b66 100644 --- a/adaptive/learner/balancing_learner.py +++ b/adaptive/learner/balancing_learner.py @@ -446,10 +446,8 @@ def __getstate__(self): self.learners, self._cdims_default, self.strategy, - self._get_data(), ) def __setstate__(self, state): - learners, cdims, strategy, data = state + learners, cdims, strategy = state self.__init__(learners, cdims=cdims, strategy=strategy) - self._set_data(data) From acc5400c11cdcf2f9a41f0300d59caffa2d6a5d9 Mon Sep 17 00:00:00 2001 From: Bas Nijholt Date: Wed, 15 Apr 2020 23:07:41 +0200 Subject: [PATCH 23/23] do not initialize child-learners twice in DataSaver --- adaptive/learner/data_saver.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) 
diff --git a/adaptive/learner/data_saver.py b/adaptive/learner/data_saver.py index 3b2561057..c824e8ac2 100644 --- a/adaptive/learner/data_saver.py +++ b/adaptive/learner/data_saver.py @@ -55,13 +55,13 @@ def __getstate__(self): return ( self.learner, self.arg_picker, - self._get_data(), + self.extra_data, ) def __setstate__(self, state): - learner, arg_picker, data = state + learner, arg_picker, extra_data = state self.__init__(learner, arg_picker) - self._set_data(data) + self.extra_data = extra_data @copy_docstring_from(BaseLearner.save) def save(self, fname, compress=True):