From 2a5ca686ec20684913f7862c99ff432918e44f32 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 31 Jan 2018 16:19:46 -0500 Subject: [PATCH 01/17] beginning to refactor reinitialize as a method on integrator functions --- psyneulink/components/functions/function.py | 59 +++++++-------------- 1 file changed, 18 insertions(+), 41 deletions(-) diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index 1d5e7d09565..e286b26f401 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -4189,20 +4189,14 @@ def _runge_kutta_4(self, previous_value, previous_time, slope, time_step_size): return value + def reinitialize(self, new_previous_value): + self._initializer = new_previous_value + self.value = new_previous_value + self.previous_value = new_previous_value + def function(self, *args, **kwargs): raise FunctionError("Integrator is not meant to be called explicitly") - @property - def reinitialize(self): - return self.previous_value - - @reinitialize.setter - def reinitialize(self, val): - self._initializer = val - self.value = val - self.previous_value = val - - class SimpleIntegrator( Integrator): # -------------------------------------------------------------------------------- """ @@ -4634,18 +4628,12 @@ def function(self, return adjusted_value - @property - def reinitialize(self): - return self.previous_value - - @reinitialize.setter - def reinitialize(self, val): - self._initializer = val - self.value = val - self.previous_value = val + def reinitialize(self, new_previous_value): + self._initializer = new_previous_value + self.value = new_previous_value + self.previous_value = new_previous_value self.previous_time = 0.0 - class ConstantIntegrator(Integrator): # -------------------------------------------------------------------------------- """ ConstantIntegrator( \ @@ -5412,26 +5400,15 @@ def function(self, # Current output format is [[[decision_variable]], time] return adjusted_value - @property - def reinitialize(self): - return self.previous_value, self.previous_time - - @reinitialize.setter - def reinitialize(self, value): - try: - val, time = value - self._initializer = val - self.value = val - self.previous_value = val - self.previous_time = time - except (ValueError, TypeError): - num_items = len(np.atleast_1d(value)) - if num_items == 1: - raise FunctionError("DriftDiffusionIntegrator requires exactly two items (position, time) in order to " - "reinitialize. Only one item ({}) was provided to reinitialize {}.".format(value, self.name)) - - raise FunctionError("DriftDiffusionIntegrator requires exactly two items (position, time) in order to " - "reinitialize. 
{} items ({}) were provided to reinitialize {}.".format(num_items, value, self.name)) + def reinitialize(self, new_previous_value=None, new_previous_time=None): + if new_previous_value is None: + new_previous_value = self.instance_defaults.initializer + if new_previous_time is None: + new_previous_time = self.instance_defaults.t0 + self._initializer = new_previous_value + self.value = new_previous_value + self.previous_value = new_previous_value + self.previous_time = new_previous_time class OrnsteinUhlenbeckIntegrator( Integrator): # -------------------------------------------------------------------------------- From 390ca46bdc0c1df179f65c15775c73f7a6f21255 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 1 Feb 2018 15:21:15 -0500 Subject: [PATCH 02/17] adding a reinitialize method on mechanisms, which calls reinitialize on its function or integrator_function, then updates the mechanism's value and output states --- psyneulink/components/functions/function.py | 168 +++++++----------- psyneulink/components/mechanisms/mechanism.py | 19 +- 2 files changed, 81 insertions(+), 106 deletions(-) diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index e286b26f401..22b090c61a4 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -4189,11 +4189,11 @@ def _runge_kutta_4(self, previous_value, previous_time, slope, time_step_size): return value - def reinitialize(self, new_previous_value): + def reinitialize(self, new_previous_value, **kwargs): self._initializer = new_previous_value self.value = new_previous_value self.previous_value = new_previous_value - + return self.value def function(self, *args, **kwargs): raise FunctionError("Integrator is not meant to be called explicitly") @@ -4628,12 +4628,6 @@ def function(self, return adjusted_value - def reinitialize(self, new_previous_value): - self._initializer = new_previous_value - self.value = new_previous_value - self.previous_value = new_previous_value - self.previous_time = 0.0 - class ConstantIntegrator(Integrator): # -------------------------------------------------------------------------------- """ ConstantIntegrator( \ @@ -5409,6 +5403,7 @@ def reinitialize(self, new_previous_value=None, new_previous_time=None): self.value = new_previous_value self.previous_value = new_previous_value self.previous_time = new_previous_time + return self.value class OrnsteinUhlenbeckIntegrator( Integrator): # -------------------------------------------------------------------------------- @@ -5654,26 +5649,16 @@ def function(self, return adjusted_value - @property - def reinitialize(self): - return self.previous_value, self.previous_time - - @reinitialize.setter - def reinitialize(self, value): - try: - val, time = value - self._initializer = val - self.value = val - self.previous_value = val - self.previous_time = time - except (ValueError, TypeError): - num_items = len(np.atleast_1d(value)) - if num_items == 1: - raise FunctionError("OrnsteinUhlenbeckIntegrator requires exactly two items (position, time) in order to " - "reinitialize. Only one item ({}) was provided to reinitialize {}.".format(value, self.name)) - - raise FunctionError("OrnsteinUhlenbeckIntegrator requires exactly two items (position, time) in order to " - "reinitialize. 
{} items ({}) were provided to reinitialize {}.".format(num_items, value, self.name)) + def reinitialize(self, new_previous_value=None, new_previous_time=None): + if new_previous_value is None: + new_previous_value = self.instance_defaults.initializer + if new_previous_time is None: + new_previous_time = self.instance_defaults.t0 + self._initializer = new_previous_value + self.value = new_previous_value + self.previous_value = new_previous_value + self.previous_time = new_previous_time + return self.value class FHNIntegrator(Integrator): # -------------------------------------------------------------------------------- """ @@ -6439,29 +6424,20 @@ def dw_dt(time, w, v, mode, a_w, b_w, c_w, uncorrelated_activity, time_constant_ return self.previous_v, self.previous_w, self.previous_time - @property - def reinitialize(self): - return self.previous_v, self.previous_w, self.previous_time - - @reinitialize.setter - def reinitialize(self, value): - try: - v, w, time = value - self._initial_v = v - self.previous_v = v - self._initial_w = w - self.previous_w = w - self.previous_time = time - self.value = v, w, time - - except (ValueError, TypeError): - num_items = len(np.atleast_1d(value)) - if num_items == 1: - raise FunctionError("FHNIntegrator requires exactly three items (v, w, time) in order to " - "reinitialize. Only one item ({}) was provided to reinitialize {}.".format(value, self.name)) - - raise FunctionError("FHNIntegrator requires exactly three items (v, w, time) in order to " - "reinitialize. {} items ({}) were provided to reinitialize {}.".format(num_items, value, self.name)) + def reinitialize(self, new_previous_v=None, new_previous_w=None, new_previous_time=None): + if new_previous_v is None: + new_previous_v = self.instance_defaults.initial_v + if new_previous_w is None: + new_previous_w = self.instance_defaults.initial_w + if new_previous_time is None: + new_previous_time = self.instance_defaults.t_0 + self._initial_v = new_previous_v + self.previous_v = new_previous_v + self._initial_w = new_previous_w + self.previous_w = new_previous_w + self.previous_time = new_previous_time + self.value = new_previous_v, new_previous_w, new_previous_time + return self.value class AccumulatorIntegrator(Integrator): # -------------------------------------------------------------------------------- """ @@ -7111,39 +7087,47 @@ def function(self, """ variable = self._update_variable(self._check_args(variable=variable, params=params, context=context)) - rate = np.array(self.get_current_function_param(RATE)).astype(float) - offset = self.get_current_function_param(OFFSET) # execute noise if it is a function noise = self._try_execute_param(self.get_current_function_param(NOISE), variable) - long_term_rate = self.get_current_function_param("long_term_rate") - long_term_gain = self.get_current_function_param("long_term_gain") - long_term_bias = self.get_current_function_param("long_term_bias") short_term_rate = self.get_current_function_param("short_term_rate") - short_term_gain = self.get_current_function_param("short_term_gain") - short_term_bias = self.get_current_function_param("short_term_bias") - operation = self.get_current_function_param(OPERATION) + long_term_rate = self.get_current_function_param("long_term_rate") - # long term params applied to variable + # Integrate Short Term Utility: + short_term_utility=self._EWMA_filter(self.previous_short_term_utility, + short_term_rate, + variable) + # Integrate Long Term Utility: long_term_utility = self._EWMA_filter(self.previous_long_term_utility, 
long_term_rate, variable) - long_term_utility_logistic = self._logistic(variable=long_term_utility, - gain=long_term_gain, - bias=long_term_bias - ) - self.long_term_utility_logistic = long_term_utility_logistic - # short term params applied to variable - short_term_utility=self._EWMA_filter(self.previous_short_term_utility, - short_term_rate, - variable) + value = self.combine_utilities(short_term_utility, long_term_utility) + + if not context or not INITIALIZING in context: + self.previous_short_term_utility = short_term_utility + self.previous_long_term_utility = long_term_utility + + return value + + def combine_utilities(self, short_term_utility, long_term_utility): + short_term_gain = self.get_current_function_param("short_term_gain") + short_term_bias = self.get_current_function_param("short_term_bias") + long_term_gain = self.get_current_function_param("long_term_gain") + long_term_bias = self.get_current_function_param("long_term_bias") + operation = self.get_current_function_param(OPERATION) + offset = self.get_current_function_param(OFFSET) + short_term_utility_logistic=self._logistic(variable=short_term_utility, gain=short_term_gain, - bias=short_term_bias - ) + bias=short_term_bias) self.short_term_utility_logistic = short_term_utility_logistic + long_term_utility_logistic = self._logistic(variable=long_term_utility, + gain=long_term_gain, + bias=long_term_bias) + self.long_term_utility_logistic = long_term_utility_logistic + if operation == "s*l": # Engagement in current task = [1—logistic(short term utility)]*[logistic{long - term utility}] value = (1-short_term_utility_logistic)*long_term_utility_logistic @@ -7157,41 +7141,15 @@ def function(self, # Engagement in current task = [logistic{long - term utility}] - [1—logistic(short term utility)] value = long_term_utility_logistic - (1-short_term_utility_logistic) - adjusted_value = value + offset - # If this NOT an initialization run, update the old utility values - # If it IS an initialization run, leave as is - # (don't want to count it as an execution step) - - if not context or not INITIALIZING in context: - self.previous_long_term_utility = long_term_utility - self.previous_short_term_utility = short_term_utility - - return adjusted_value - - @property - def reinitialize(self): - return self.previous_short_term_utility, self.previous_long_term_utility - - @reinitialize.setter - def reinitialize(self, value): - try: - short, long = value - self._initial_short_term_utility = short - self.previous_short_term_utility = short - self._initial_long_term_utility = long - self.previous_long_term_utility = long - - except (ValueError, TypeError): - num_items = len(np.atleast_1d(value)) - if num_items == 1: - raise FunctionError("AGTUtilityIntegrator requires exactly two items (short term utility, long term utility) in order to " - "reinitialize. Only one item ({}) was provided to reinitialize {}.".format(value, - self.name)) - raise FunctionError("AGTUtilityIntegrator requires exactly two items (short term utility, long term utility) in order to " - "reinitialize. 
{} items ({}) were provided to reinitialize {}.".format(num_items, value, - self.name)) - + return value + offset + def reinitialize(self, short, long): + self._initial_short_term_utility = short + self.previous_short_term_utility = short + self._initial_long_term_utility = long + self.previous_long_term_utility = long + self.value = self.combine_utilities(long, short) + return self.value # Note: For any of these that correspond to args, value must match the name of the corresponding arg in __init__() DRIFT_RATE = 'drift_rate' DRIFT_RATE_VARIABILITY = 'DDM_DriftRateVariability' diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index da516b34886..a0cedffbdaf 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -1793,6 +1793,24 @@ def _add_projection_from_mechanism(self, receiver, state, projection, context=No from psyneulink.components.projections.projection import _add_projection_from _add_projection_from(sender=self, state=state, projection_spec=projection, receiver=receiver, context=context) + def reinitialize(self, *args): + from psyneulink.components.functions.function import Integrator + + # If the primary function of the mechanism is an integrator: + # (1) reinitialize it, (2) update value, (3) update output states + if isinstance(self.function_object, Integrator): + new_value = self.function_object.reinitialize(*args) + self.value = new_value + self._update_output_states(context="REINITIALIZING") + + # If the mechanism has an auxiliary integrator function: + # (1) reinitialize it, (2) run the primary function with the new "previous_value" as input + # (3) update value, (4) update output states + elif hasattr(self, "integrator_function"): + new_input = self.function_object.reinitialize(*args) + self.value = self.function(new_input, context="REINITIALIZING") + self._update_output_states(context="REINITIALIZING") + def get_current_mechanism_param(self, param_name): try: return self._parameter_states[param_name].value @@ -2320,7 +2338,6 @@ def add_states(self, states, context=ADD_STATES): from psyneulink.components.states.state import _parse_state_type from psyneulink.components.states.inputstate import InputState, _instantiate_input_states from psyneulink.components.states.outputstate import OutputState, _instantiate_output_states - # Put in list to standardize treatment below if not isinstance(states, list): states = [states] From 677cead80afaf6724d5b63237283cfa4c4ed4c2b Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 2 Feb 2018 14:15:07 -0500 Subject: [PATCH 03/17] adding pytests for reinitializing at both function and mechanism level; cleaning up mistakes in reinitialize() methods along the way --- psyneulink/components/functions/function.py | 31 ++- psyneulink/components/mechanisms/mechanism.py | 2 +- .../mechanisms/processing/integrator/ddm.py | 10 + tests/mechanisms/test_ddm_mechanism.py | 92 ++++--- tests/mechanisms/test_integrator_mechanism.py | 240 ++++++++++++------ 5 files changed, 257 insertions(+), 118 deletions(-) diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index 22b090c61a4..14f56208c0c 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -4189,7 +4189,9 @@ def _runge_kutta_4(self, previous_value, previous_time, slope, time_step_size): return value - def reinitialize(self, new_previous_value, **kwargs): + def reinitialize(self, 
new_previous_value=None, **kwargs): + if new_previous_value is None: + new_previous_value = self.instance_defaults.initializer self._initializer = new_previous_value self.value = new_previous_value self.previous_value = new_previous_value @@ -5403,7 +5405,7 @@ def reinitialize(self, new_previous_value=None, new_previous_time=None): self.value = new_previous_value self.previous_value = new_previous_value self.previous_time = new_previous_time - return self.value + return np.atleast_1d(new_previous_value), np.atleast_1d(new_previous_time) class OrnsteinUhlenbeckIntegrator( Integrator): # -------------------------------------------------------------------------------- @@ -7094,13 +7096,13 @@ def function(self, long_term_rate = self.get_current_function_param("long_term_rate") # Integrate Short Term Utility: - short_term_utility=self._EWMA_filter(self.previous_short_term_utility, - short_term_rate, - variable) + short_term_utility = self._EWMA_filter(self.previous_short_term_utility, + short_term_rate, + variable) # Integrate Long Term Utility: long_term_utility = self._EWMA_filter(self.previous_long_term_utility, - long_term_rate, - variable) + long_term_rate, + variable) value = self.combine_utilities(short_term_utility, long_term_utility) @@ -7118,9 +7120,9 @@ def combine_utilities(self, short_term_utility, long_term_utility): operation = self.get_current_function_param(OPERATION) offset = self.get_current_function_param(OFFSET) - short_term_utility_logistic=self._logistic(variable=short_term_utility, - gain=short_term_gain, - bias=short_term_bias) + short_term_utility_logistic = self._logistic(variable=short_term_utility, + gain=short_term_gain, + bias=short_term_bias) self.short_term_utility_logistic = short_term_utility_logistic long_term_utility_logistic = self._logistic(variable=long_term_utility, @@ -7143,13 +7145,18 @@ def combine_utilities(self, short_term_utility, long_term_utility): return value + offset - def reinitialize(self, short, long): + def reinitialize(self, short=None, long=None): + if short is None: + short = self.instance_defaults.initial_short_term_utility + if long is None: + long = self.instance_defaults.initial_long_term_utility self._initial_short_term_utility = short self.previous_short_term_utility = short self._initial_long_term_utility = long self.previous_long_term_utility = long - self.value = self.combine_utilities(long, short) + self.value = self.combine_utilities(short, long) return self.value +# # Note: For any of these that correspond to args, value must match the name of the corresponding arg in __init__() DRIFT_RATE = 'drift_rate' DRIFT_RATE_VARIABILITY = 'DDM_DriftRateVariability' diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index a0cedffbdaf..ba59392e267 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -1800,7 +1800,7 @@ def reinitialize(self, *args): # (1) reinitialize it, (2) update value, (3) update output states if isinstance(self.function_object, Integrator): new_value = self.function_object.reinitialize(*args) - self.value = new_value + self.value = np.atleast_2d(new_value) self._update_output_states(context="REINITIALIZING") # If the mechanism has an auxiliary integrator function: diff --git a/psyneulink/library/mechanisms/processing/integrator/ddm.py b/psyneulink/library/mechanisms/processing/integrator/ddm.py index b0859523c11..b8263ba0e61 100644 --- a/psyneulink/library/mechanisms/processing/integrator/ddm.py +++ 
b/psyneulink/library/mechanisms/processing/integrator/ddm.py @@ -940,6 +940,15 @@ def _execute(self, # """ # # IMPLEMENTATION NOTE: TBI when time_step is implemented for DDM + def reinitialize(self, *args): + from psyneulink.components.functions.function import Integrator + + # (1) reinitialize function, (2) update mechanism value, (3) update output states + if isinstance(self.function_object, Integrator): + new_values = self.function_object.reinitialize(*args) + self.value = np.array(new_values) + self._update_output_states(context="REINITIALIZING") + @property def is_finished(self): # find the single numeric entry in previous_value @@ -961,3 +970,4 @@ def is_finished(self): THRESHOLD))) return True return self._is_finished + diff --git a/tests/mechanisms/test_ddm_mechanism.py b/tests/mechanisms/test_ddm_mechanism.py index fa90986165f..d3f0bc48ee9 100644 --- a/tests/mechanisms/test_ddm_mechanism.py +++ b/tests/mechanisms/test_ddm_mechanism.py @@ -11,38 +11,68 @@ from psyneulink.scheduling.time import TimeScale class TestReinitialize: - def test_valid_reinitialization(self): - D = DDM(name="D", - function=DriftDiffusionIntegrator()) + + def test_valid(self): + D = DDM( + name='DDM', + function=DriftDiffusionIntegrator(), + ) + + # returns previous_value + rate * variable * time_step_size + noise + # 0.0 + 1.0 * 1.0 * 1.0 + 0.0 + D.execute(1.0) + assert np.allclose(D.value, [[1.0], [1.0]]) + assert np.allclose(D.output_states[0].value, 1.0) + assert np.allclose(D.output_states[1].value, 1.0) + + # reinitialize function + D.function_object.reinitialize(2.0, 0.1) + assert np.allclose(D.function_object.value, 2.0) + assert np.allclose(D.function_object.previous_value, 2.0) + assert np.allclose(D.function_object.previous_time, 0.1) + assert np.allclose(D.value, [[1.0], [1.0]]) + assert np.allclose(D.output_states[0].value, 1.0) + assert np.allclose(D.output_states[1].value, 1.0) + + # reinitialize function without value spec + D.function_object.reinitialize() + assert np.allclose(D.function_object.value, 0.0) + assert np.allclose(D.function_object.previous_value, 0.0) + assert np.allclose(D.function_object.previous_time, 0.0) + assert np.allclose(D.value, [[1.0], [1.0]]) + assert np.allclose(D.output_states[0].value, 1.0) + assert np.allclose(D.output_states[1].value, 1.0) + + # reinitialize mechanism + D.reinitialize(2.0, 0.1) + assert np.allclose(D.function_object.value, 2.0) + assert np.allclose(D.function_object.previous_value, 2.0) + assert np.allclose(D.function_object.previous_time, 0.1) + assert np.allclose(D.value, [[2.0], [0.1]]) + assert np.allclose(D.output_states[0].value, 2.0) + assert np.allclose(D.output_states[1].value, 0.1) + D.execute(1.0) - assert np.allclose([[1.0]], D.function_object.reinitialize[0]) - assert np.allclose([1.0], D.function_object.reinitialize[1]) - - D.execute(2.0) - assert np.allclose([[3.0]], D.function_object.reinitialize[0]) - assert np.allclose([2.0], D.function_object.reinitialize[1]) - - D.function_object.reinitialize = 4.0, 0.1 - - D.execute(2.0) - assert np.allclose([[6.0]], D.function_object.reinitialize[0]) - assert np.allclose([1.1], D.function_object.reinitialize[1]) - - def test_invalid_reinitialization_too_many_items(self): - D = DDM(name="D", - function=DriftDiffusionIntegrator()) - with pytest.raises(FunctionError) as error_text: - D.function_object.reinitialize = 4.0, 0.1, 10.0 - assert("DriftDiffusionIntegrator requires exactly two items (position, time) in order to reinitialize" in - str(error_text.value) and "3 items ((4.0, 0.1, 10.0)) 
were provided to reinitialize" in str(error_text.value)) - - def test_invalid_reinitialization_too_few_items(self): - D = DDM(name="D", - function=DriftDiffusionIntegrator()) - with pytest.raises(FunctionError) as error_text: - D.function_object.reinitialize = 4.0 - assert("DriftDiffusionIntegrator requires exactly two items (position, time) in order to reinitialize. Only " - "one item (4.0) was provided to reinitialize" in str(error_text.value)) + # 2.0 + 1.0 = 3.0 ; 0.1 + 1.0 = 1.1 + assert np.allclose(D.value, [[[3.0]], [[1.1]]]) + assert np.allclose(D.output_states[0].value, 3.0) + assert np.allclose(D.output_states[1].value, 1.1) + + # reinitialize mechanism without value spec + D.reinitialize() + assert np.allclose(D.function_object.value, 0.0) + assert np.allclose(D.function_object.previous_value, 0.0) + assert np.allclose(D.function_object.previous_time, 0.0) + assert np.allclose(D.output_states[0].value[0], 0.0) + assert np.allclose(D.output_states[1].value[0], 0.0) + + # reinitialize only decision variable + D.reinitialize(1.0) + assert np.allclose(D.function_object.value, 1.0) + assert np.allclose(D.function_object.previous_value, 1.0) + assert np.allclose(D.function_object.previous_time, 0.0) + assert np.allclose(D.output_states[0].value[0], 1.0) + assert np.allclose(D.output_states[1].value[0], 0.0) diff --git a/tests/mechanisms/test_integrator_mechanism.py b/tests/mechanisms/test_integrator_mechanism.py index 73d2eaa5f66..49c92351b54 100644 --- a/tests/mechanisms/test_integrator_mechanism.py +++ b/tests/mechanisms/test_integrator_mechanism.py @@ -10,121 +10,213 @@ class TestReinitialize: - def test_FHN_valid_reinitialization(self): + def test_FHN_valid(self): I = IntegratorMechanism(name="I", function=FHNIntegrator()) + I.execute(1.0) - assert np.allclose([[0.05127053]], I.function_object.reinitialize[0]) - assert np.allclose([[0.00276967]], I.function_object.reinitialize[1]) - assert np.allclose([[ 0.05]], I.function_object.reinitialize[2]) - I.function_object.reinitialize = 0.01, 0.02, 0.03 + assert np.allclose([[0.05127053]], I.value[0]) + assert np.allclose([[0.00276967]], I.value[1]) + assert np.allclose([[0.05]], I.value[2]) - I.execute(1.0) - assert np.allclose([[0.06075727]], I.function_object.reinitialize[0]) - assert np.allclose([[0.02274597]], I.function_object.reinitialize[1]) - assert np.allclose([[0.08]], I.function_object.reinitialize[2]) + I.function_object.reinitialize(0.01, 0.02, 0.03) - def test_FHN_invalid_reinitialization_too_many_items(self): - I = IntegratorMechanism(name="I", - function=FHNIntegrator()) - with pytest.raises(FunctionError) as error_text: - I.function_object.reinitialize = 4.0, 0.1, 10.0, 20.0 - assert("FHNIntegrator requires exactly three items (v, w, time) in order to reinitialize" in - str(error_text.value) and "4 items ((4.0, 0.1, 10.0, 20.0)) were provided to reinitialize" in str(error_text.value)) + assert np.allclose(0.01, I.function_object.value[0]) + assert np.allclose(0.02, I.function_object.value[1]) + assert np.allclose(0.03, I.function_object.value[2]) - def test_FHN_invalid_reinitialization_too_few_items(self): - I = IntegratorMechanism(name="I", - function=FHNIntegrator()) - with pytest.raises(FunctionError) as error_text: - I.function_object.reinitialize = 4.0 - assert("FHNIntegrator requires exactly three items (v, w, time) in order to reinitialize. 
Only " - "one item (4.0) was provided to reinitialize" in str(error_text.value)) + assert np.allclose([[0.05127053]], I.value[0]) + assert np.allclose([[0.00276967]], I.value[1]) + assert np.allclose([[0.05]], I.value[2]) + + assert np.allclose([[0.05127053]], I.output_states[0].value) - def test_AGTUtility_valid_reinitialization(self): - I = IntegratorMechanism(name="I", - function=AGTUtilityIntegrator()) I.execute(1.0) - assert np.allclose([[0.9]], I.function_object.reinitialize[0]) - assert np.allclose([[0.1]], I.function_object.reinitialize[1]) - I.function_object.reinitialize = 0.2, 0.08 + assert np.allclose([[0.06075727]], I.value[0]) + assert np.allclose([[0.02274597]], I.value[1]) + assert np.allclose([[0.08]], I.value[2]) - assert np.allclose([[0.2]], I.function_object.reinitialize[0]) - assert np.allclose([[0.08]], I.function_object.reinitialize[1]) + assert np.allclose([[0.06075727]], I.output_states[0].value) - def test_AGTUtility_invalid_reinitialization_too_many_items(self): - I = IntegratorMechanism(name="I", - function=AGTUtilityIntegrator()) - with pytest.raises(FunctionError) as error_text: - I.function_object.reinitialize = 4.0, 0.1, 10.0 - assert("AGTUtilityIntegrator requires exactly two items (short term utility, long term utility) in order to reinitialize" in - str(error_text.value) and "3 items ((4.0, 0.1, 10.0)) were provided to reinitialize" in str(error_text.value)) + # I.reinitialize(new_previous_v=0.01, new_previous_w=0.02, new_previous_time=0.03) + I.reinitialize(0.01, 0.02, 0.03) + + assert np.allclose(0.01, I.value[0]) + assert np.allclose(0.02, I.value[1]) + assert np.allclose(0.03, I.value[2]) - def test_AGTUtility_invalid_reinitialization_too_few_items(self): + assert np.allclose(0.01, I.output_states[0].value) + # assert np.allclose(0.01, I.output_state.value[0]) + # assert np.allclose(0.02, I.output_state.value[1]) + # assert np.allclose(0.03, I.output_state.value[2]) + + def test_AGTUtility_valid(self): I = IntegratorMechanism(name="I", function=AGTUtilityIntegrator()) - with pytest.raises(FunctionError) as error_text: - I.function_object.reinitialize = 4.0 - assert("AGTUtilityIntegrator requires exactly two items (short term utility, long term utility) in order to reinitialize. 
Only " - "one item (4.0) was provided to reinitialize" in str(error_text.value)) - def test_integrator_simple_with_reinitialize(self): + assert np.allclose([[0.0]], I.function_object.initial_short_term_utility) + assert np.allclose([[0.0]], I.function_object.initial_long_term_utility) + assert np.allclose([[0.0]], I.function_object.previous_short_term_utility) + assert np.allclose([[0.0]], I.function_object.previous_long_term_utility) + + I.function_object.reinitialize(0.2, 0.8) + + assert np.allclose([[0.2]], I.function_object.initial_short_term_utility) + assert np.allclose([[0.8]], I.function_object.initial_long_term_utility) + assert np.allclose([[0.2]], I.function_object.previous_short_term_utility) + assert np.allclose([[0.8]], I.function_object.previous_long_term_utility) + + I.function_object.reinitialize() + + assert np.allclose([[0.0]], I.function_object.initial_short_term_utility) + assert np.allclose([[0.0]], I.function_object.initial_long_term_utility) + assert np.allclose([[0.0]], I.function_object.previous_short_term_utility) + assert np.allclose([[0.0]], I.function_object.previous_long_term_utility) + + I.reinitialize(0.3, 0.7) + + assert np.allclose([[0.3]], I.function_object.initial_short_term_utility) + assert np.allclose([[0.7]], I.function_object.initial_long_term_utility) + assert np.allclose([[0.3]], I.function_object.previous_short_term_utility) + assert np.allclose([[0.7]], I.function_object.previous_long_term_utility) + assert np.allclose(I.function_object.combine_utilities(0.3, 0.7), I.value) + + I.reinitialize() + + assert np.allclose([[0.0]], I.function_object.initial_short_term_utility) + assert np.allclose([[0.0]], I.function_object.initial_long_term_utility) + assert np.allclose([[0.0]], I.function_object.previous_short_term_utility) + assert np.allclose([[0.0]], I.function_object.previous_long_term_utility) + assert np.allclose(I.function_object.combine_utilities(0.0, 0.0), I.value) + + def test_Simple_valid(self): I = IntegratorMechanism( name='IntegratorMechanism', function=SimpleIntegrator( ), ) - # # P = Process(pathway=[I]) # returns previous_value + rate*variable + noise # so in this case, returns 10.0 - val = float(I.execute(10)) - - # testing initializer - I.function_object.reinitialize = 5.0 - - val2 = float(I.execute(0)) - - assert [val, val2] == [10.0, 5.0] - - def test_integrator_adaptive_with_reinitialize(self): + I.execute(10) + assert np.allclose(I.value, 10.0) + assert np.allclose(I.output_state.value, 10.0) + + # reinitialize function + I.function_object.reinitialize(5.0) + assert np.allclose(I.function_object.value, 5.0) + assert np.allclose(I.value, 10.0) + assert np.allclose(I.output_states[0].value, 10.0) + + # reinitialize function without value spec + I.function_object.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 10.0) + assert np.allclose(I.output_states[0].value, 10.0) + + # reinitialize mechanism + I.reinitialize(4.0) + assert np.allclose(I.function_object.value, 4.0) + assert np.allclose(I.value, 4.0) + assert np.allclose(I.output_states[0].value, 4.0) + + I.execute(1) + assert np.allclose(I.value, 5.0) + assert np.allclose(I.output_states[0].value, 5.0) + + # reinitialize mechanism without value spec + I.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 0.0) + assert np.allclose(I.output_states[0].value, 0.0) + + def test_Adaptive_valid(self): I = IntegratorMechanism( name='IntegratorMechanism', function=AdaptiveIntegrator( rate=0.5 
), ) - # val = float(I.execute(10)[0]) - # P = Process(pathway=[I]) - val = float(I.execute(10)) - # returns (rate)*variable + (1-rate*previous_value) + noise - # rate = 1, noise = 0, so in this case, returns 10.0 - # testing initializer - I.function_object.reinitialize = 1.0 - val2 = float(I.execute(1)) + # returns (1-rate)*previous_value + rate*variable + noise + # so in this case, returns 0.5*0 + 0.5*10 + 0 = 5.0 + I.execute(10) + assert np.allclose(I.value, 5.0) + assert np.allclose(I.output_state.value, 5.0) + + # reinitialize function + I.function_object.reinitialize(1.0) + assert np.allclose(I.function_object.value, 1.0) + assert np.allclose(I.value, 5.0) + assert np.allclose(I.output_states[0].value, 5.0) + + # reinitialize function without value spec + I.function_object.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 5.0) + assert np.allclose(I.output_states[0].value, 5.0) + + # reinitialize mechanism + I.reinitialize(2.0) + assert np.allclose(I.function_object.value, 2.0) + assert np.allclose(I.value, 2.0) + assert np.allclose(I.output_states[0].value, 2.0) + + I.execute(1.0) + # (1-0.5)*2.0 + 0.5*1.0 + 0 = 1.5 + assert np.allclose(I.value, 1.5) + assert np.allclose(I.output_states[0].value, 1.5) - assert [val, val2] == [5.0, 1.0] + # reinitialize mechanism without value spec + I.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 0.0) + assert np.allclose(I.output_states[0].value, 0.0) - def test_integrator_constant_with_reinitialize(self): + def test_Constant_valid(self): I = IntegratorMechanism( name='IntegratorMechanism', function=ConstantIntegrator( rate=1.0 ), ) - # val = float(I.execute(10)[0]) - # P = Process(pathway=[I]) - val = float(I.execute()) - # returns previous_value + rate + noise - # rate = 1.0, noise = 0, so in this case returns 1.0 - # testing initializer - I.function_object.reinitialize = 10.0 - val2 = float(I.execute()) + # returns previous_value + rate + noise + # so in this case, returns 0.0 + 1.0 + I.execute(1000) + assert np.allclose(I.value, 1.0) + assert np.allclose(I.output_state.value, 1.0) + + # reinitialize function + I.function_object.reinitialize(2.0) + assert np.allclose(I.function_object.value, 2.0) + assert np.allclose(I.value, 1.0) + assert np.allclose(I.output_states[0].value, 1.0) + + # reinitialize function without value spec + I.function_object.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 1.0) + assert np.allclose(I.output_states[0].value, 1.0) + + # reinitialize mechanism + I.reinitialize(2.0) + assert np.allclose(I.function_object.value, 2.0) + assert np.allclose(I.value, 2.0) + assert np.allclose(I.output_states[0].value, 2.0) - assert [val, val2] == [1.0, 11.0] + I.execute(1.0) + # 2.0 + 1.0 = 3.0 + assert np.allclose(I.value, 3.0) + assert np.allclose(I.output_states[0].value, 3.0) + + # reinitialize mechanism without value spec + I.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 0.0) + assert np.allclose(I.output_states[0].value, 0.0) class TestIntegratorFunctions: From d12e361b62134c687083e9eb189fad8757acd929 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 2 Feb 2018 17:01:59 -0500 Subject: [PATCH 04/17] adding more pytests for reinitialize and fixing reinitialize bugs; beginning to update documentation --- psyneulink/components/functions/function.py | 23 +-- psyneulink/components/mechanisms/mechanism.py | 4 +- 
tests/mechanisms/test_integrator_mechanism.py | 132 +++++++++++++++++- tests/mechanisms/test_transfer_mechanism.py | 13 +- 4 files changed, 155 insertions(+), 17 deletions(-) diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index 14f56208c0c..9546d8a9fa0 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -3984,15 +3984,6 @@ class Integrator(IntegratorFunction): # --------------------------------------- previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. - reinitialize : float or np.array - Sets - - - `previous_value ` - - `initializer ` - - `value ` - - to the quantity specified, which effectively begins accumulation over again at the specified value - owner : Component `component ` to which the Function has been assigned. @@ -4190,12 +4181,22 @@ def _runge_kutta_4(self, previous_value, previous_time, slope, time_step_size): return value def reinitialize(self, new_previous_value=None, **kwargs): + """ + Sets + + - `previous_value ` + - `initializer ` + - `value ` + + to the quantity specified, which effectively begins accumulation over again at the specified value + """ if new_previous_value is None: new_previous_value = self.instance_defaults.initializer self._initializer = new_previous_value self.value = new_previous_value self.previous_value = new_previous_value return self.value + def function(self, *args, **kwargs): raise FunctionError("Integrator is not meant to be called explicitly") @@ -5558,7 +5559,7 @@ def __init__(self, offset: parameter_spec = 0.0, time_step_size=1.0, t0=0.0, - decay = 1.0, + decay=1.0, initializer=ClassDefaults.variable, params: tc.optional(dict) = None, owner=None, @@ -6439,7 +6440,7 @@ def reinitialize(self, new_previous_v=None, new_previous_w=None, new_previous_ti self.previous_w = new_previous_w self.previous_time = new_previous_time self.value = new_previous_v, new_previous_w, new_previous_time - return self.value + return [new_previous_v], [new_previous_w], [new_previous_time] class AccumulatorIntegrator(Integrator): # -------------------------------------------------------------------------------- """ diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index ba59392e267..47a7a3031f1 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -1807,7 +1807,9 @@ def reinitialize(self, *args): # (1) reinitialize it, (2) run the primary function with the new "previous_value" as input # (3) update value, (4) update output states elif hasattr(self, "integrator_function"): - new_input = self.function_object.reinitialize(*args) + new_input = self.integrator_function.reinitialize(*args) + if hasattr(self, "initial_value"): + self.initial_value = np.atleast_1d(*args)[0] self.value = self.function(new_input, context="REINITIALIZING") self._update_output_states(context="REINITIALIZING") diff --git a/tests/mechanisms/test_integrator_mechanism.py b/tests/mechanisms/test_integrator_mechanism.py index 49c92351b54..d158d252830 100644 --- a/tests/mechanisms/test_integrator_mechanism.py +++ b/tests/mechanisms/test_integrator_mechanism.py @@ -3,9 +3,10 @@ from psyneulink.components.functions.function import AGTUtilityIntegrator, AdaptiveIntegrator, DriftDiffusionIntegrator, OrnsteinUhlenbeckIntegrator from psyneulink.components.functions.function import AccumulatorIntegrator, 
ConstantIntegrator, FHNIntegrator, NormalDist, SimpleIntegrator -from psyneulink.components.functions.function import FunctionError +from psyneulink.components.functions.function import LCAIntegrator, FunctionError from psyneulink.components.mechanisms.mechanism import MechanismError from psyneulink.components.mechanisms.processing.integratormechanism import IntegratorMechanism + from psyneulink.scheduling.time import TimeScale @@ -218,6 +219,133 @@ def test_Constant_valid(self): assert np.allclose(I.value, 0.0) assert np.allclose(I.output_states[0].value, 0.0) + def test_OU_valid(self): + I = IntegratorMechanism( + name='IntegratorMechanism', + function=OrnsteinUhlenbeckIntegrator(), + ) + + # previous_value + (decay * previous_value - rate * variable) * time_step_size + noise + # decay=1.0, initializer=0.0, rate=1.0, time_step_size=1.0, noise=0.0 + # returns 0.0 + (1.0*0.0 - 1.0*10.0*1.0) + 0.0 = -10.0 + I.execute(2.0) + assert np.allclose(I.value, -2.0) + assert np.allclose(I.output_state.value, -2.0) + + # reinitialize function + I.function_object.reinitialize(5.0) + assert np.allclose(I.function_object.value, 5.0) + assert np.allclose(I.value, -2.0) + assert np.allclose(I.output_states[0].value, -2.0) + + # reinitialize function without value spec + I.function_object.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, -2.0) + assert np.allclose(I.output_states[0].value, -2.0) + + # reinitialize mechanism + I.reinitialize(4.0) + assert np.allclose(I.function_object.value, 4.0) + assert np.allclose(I.value, 4.0) + assert np.allclose(I.output_states[0].value, 4.0) + + I.execute(1.0) + # 4.0 + (1.0 * 4.0 - 1.0 * 1.0) * 1.0 = 4 + 3 = 7 + assert np.allclose(I.value, 7.0) + assert np.allclose(I.output_states[0].value, 7.0) + + # reinitialize mechanism without value spec + I.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 0.0) + assert np.allclose(I.output_states[0].value, 0.0) + + def test_Accumulator_valid(self): + I = IntegratorMechanism( + name='IntegratorMechanism', + function=AccumulatorIntegrator(increment=0.1), + ) + + # returns previous_value * rate + noise + increment + # initializer = 0.0, rate = 1.0, noise = 0.0, increment = 0.1 + # returns 0.0*1.0 + 0.0 + 0.1 = 0.1 + I.execute(10000) + assert np.allclose(I.value, 0.1) + assert np.allclose(I.output_state.value, 0.1) + + # reinitialize function + I.function_object.reinitialize(2.0) + assert np.allclose(I.function_object.value, 2.0) + assert np.allclose(I.value, 0.1) + assert np.allclose(I.output_states[0].value, 0.1) + + # reinitialize function without value spec + I.function_object.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 0.1) + assert np.allclose(I.output_states[0].value, 0.1) + + # reinitialize mechanism + I.reinitialize(5.0) + assert np.allclose(I.function_object.value, 5.0) + assert np.allclose(I.value, 5.0) + assert np.allclose(I.output_states[0].value, 5.0) + + I.execute(10000) + # 5.0 * 1.0 + 0.0 + 0.1 + assert np.allclose(I.value, 5.1) + assert np.allclose(I.output_states[0].value, 5.1) + + # reinitialize mechanism without value spec + I.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 0.0) + assert np.allclose(I.output_states[0].value, 0.0) + + def test_LCA_valid(self): + I = IntegratorMechanism( + name='IntegratorMechanism', + function=LCAIntegrator(), + ) + + # previous_value + (rate*previous_value + new_value)*time_step_size + 
noise + # initializer=0.0, rate=1.0, time_step_size=0.1, noise=0.0 + # returns 0.0 + (1.0*0.0 + 2.0)*0.1 = 2.0 + I.execute(2.0) + assert np.allclose(I.value, 0.2) + assert np.allclose(I.output_state.value, 0.2) + + # reinitialize function + I.function_object.reinitialize(5.0) + assert np.allclose(I.function_object.value, 5.0) + assert np.allclose(I.value, 0.2) + assert np.allclose(I.output_states[0].value, 0.2) + + # reinitialize function without value spec + I.function_object.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 0.2) + assert np.allclose(I.output_states[0].value, 0.2) + + # reinitialize mechanism + I.reinitialize(4.0) + assert np.allclose(I.function_object.value, 4.0) + assert np.allclose(I.value, 4.0) + assert np.allclose(I.output_states[0].value, 4.0) + + I.execute(1.0) + # 4.0 + (1.0*4.0 + 1.0)*0.1 + 0.0 + assert np.allclose(I.value, 4.5) + assert np.allclose(I.output_states[0].value, 4.5) + + # reinitialize mechanism without value spec + I.reinitialize() + assert np.allclose(I.function_object.value, 0.0) + assert np.allclose(I.value, 0.0) + assert np.allclose(I.output_states[0].value, 0.0) + + class TestIntegratorFunctions: def test_simple_integrator(self): @@ -658,7 +786,7 @@ def test_integrator_simple_noise_fn(self): val = float(I.execute(10)) - I.function_object.reinitialize = 5.0 + I.function_object.reinitialize(5.0) val2 = float(I.execute(0)) diff --git a/tests/mechanisms/test_transfer_mechanism.py b/tests/mechanisms/test_transfer_mechanism.py index a8d0fa5311c..c6f5fdeed10 100644 --- a/tests/mechanisms/test_transfer_mechanism.py +++ b/tests/mechanisms/test_transfer_mechanism.py @@ -13,7 +13,6 @@ from psyneulink.components.system import System - class TestTransferMechanismInputs: # VALID INPUTS @@ -882,7 +881,7 @@ def test_previous_value_reset_initializer_execute(self): assert np.allclose(T.initial_value, 0.5) assert np.allclose(T.integrator_function.initializer, 0.5) - T.integrator_function.reinitialize = 0.5 + T.integrator_function.reinitialize(0.5) assert np.allclose(T.previous_value, 0.5) assert np.allclose(T.initial_value, 0.5) @@ -922,11 +921,19 @@ def test_previous_reset_initializer_run(self): assert np.allclose(T.initial_value, 0.5) assert np.allclose(T.integrator_function.initializer, 0.5) - T.integrator_function.reinitialize = 0.5 + T.integrator_function.reinitialize(0.9) + + assert np.allclose(T.previous_value, 0.9) + assert np.allclose(T.initial_value, 0.5) + assert np.allclose(T.integrator_function.initializer, 0.9) + assert np.allclose(T.value, 0.595) + + T.reinitialize(0.5) assert np.allclose(T.previous_value, 0.5) assert np.allclose(T.initial_value, 0.5) assert np.allclose(T.integrator_function.initializer, 0.5) + assert np.allclose(T.value, 0.5) S.run(inputs={T: 1.0}, num_trials=2) # Trial 3 From 006bc32ac0befe79a86e91cc84ea24039b110082 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Mon, 5 Feb 2018 14:12:54 -0500 Subject: [PATCH 05/17] fixing bug in log: needed to switch between 'value' and mechanism name in several loops in order to assemble log.nparray --- psyneulink/globals/log.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/psyneulink/globals/log.py b/psyneulink/globals/log.py index 8801fda66a3..2c07909b6a9 100644 --- a/psyneulink/globals/log.py +++ b/psyneulink/globals/log.py @@ -1288,6 +1288,7 @@ def nparray(self, # Get time values for all entries and sort them time_values = [] for entry in entries: + entry = self._dealias_owner_name(entry) time_values.extend([item.time 
for item in self.logged_entries[entry] if all(i is not None for i in item.time)]) @@ -1324,6 +1325,7 @@ def nparray(self, # if so, enter it in the entry's list # if not, enter `None` and check for a match in the next time column for entry in entries: + entry = self._dealias_owner_name(entry) row = [] time_col = iter(time_values) for datum in self.logged_entries[entry]: @@ -1452,7 +1454,7 @@ def _validate_entries_arg(self, entries, loggable=True, logged=False): if self._alias_owner_name(entry) not in self.loggable_items: raise LogError("{0} is not a loggable attribute of {1}".format(repr(entry), self.owner.name)) if logged: - if entry not in self.logged_entries: + if entry not in self.logged_entries and entry != 'value': # raise LogError("{} is not currently being logged by {} (try using set_log_conditions)". # format(repr(entry), self.owner.name)) print("\n{} is not currently being logged by {} or has not data (try using set_log_conditions)". From 4be99ebbaa75d4b617f61d40c86d004f9b62d410 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Mon, 5 Feb 2018 14:49:31 -0500 Subject: [PATCH 06/17] more calls to _dealias_owner_name required in order to create log.nparray --- psyneulink/globals/log.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/psyneulink/globals/log.py b/psyneulink/globals/log.py index 2c07909b6a9..ab091d804d3 100644 --- a/psyneulink/globals/log.py +++ b/psyneulink/globals/log.py @@ -1308,10 +1308,13 @@ def nparray(self, # If any time values are empty, revert to indexing the entries; # this requires that all entries have the same length else: - max_len = max([len(self.logged_entries[e]) for e in entries]) + max_len = max([len(self.logged_entries[self._dealias_owner_name(e)]) for e in entries]) # If there are no time values, only support entries of the same length - if not all(len(self.logged_entries[e])==len(self.logged_entries[entries[0]])for e in entries): + print("entries =", entries) + for e in entries: + print("e = ", self._dealias_owner_name(e)) + if not all(len(self.logged_entries[self._dealias_owner_name(e)])==len(self.logged_entries[self._dealias_owner_name(entries[0])])for e in entries): raise LogError("nparray output requires that all entries have time values or are of equal length") npa = np.arange(max_len).reshape(max_len,1).tolist() From 05f15a7fe40a97dd5770f8cdcbc98990fef4498d Mon Sep 17 00:00:00 2001 From: KristenManning Date: Mon, 5 Feb 2018 17:31:55 -0500 Subject: [PATCH 07/17] beginning to add a dictionary alternative to the options for whats returned by the log --- psyneulink/globals/log.py | 152 +++++++++++++++++++++++++++++++++++++- 1 file changed, 148 insertions(+), 4 deletions(-) diff --git a/psyneulink/globals/log.py b/psyneulink/globals/log.py index ab091d804d3..13b72d48bbe 100644 --- a/psyneulink/globals/log.py +++ b/psyneulink/globals/log.py @@ -1311,9 +1311,7 @@ def nparray(self, max_len = max([len(self.logged_entries[self._dealias_owner_name(e)]) for e in entries]) # If there are no time values, only support entries of the same length - print("entries =", entries) - for e in entries: - print("e = ", self._dealias_owner_name(e)) + # Must dealias both e and zeroth entry because either/both of these could be 'value' if not all(len(self.logged_entries[self._dealias_owner_name(e)])==len(self.logged_entries[self._dealias_owner_name(entries[0])])for e in entries): raise LogError("nparray output requires that all entries have time values or are of equal length") @@ -1361,8 +1359,154 @@ def nparray(self, npa.append(row) npa = 
np.array(npa, dtype=object) - return(npa) + return npa + + def nparray_dictionary(self, + entries=None, + owner_name:bool=False): + """ + nparray_dictionary( \ + entries=None, \ + owner_name=False): \ + ) + + Returns a Python ordered dictionary. Keys are logged items, ordered in the same order as they are specified in + the **entries** argument, plus Time. Values are numpy arrays in which each item is the data for that time point of the + component specified in the Key. + + - - - - example - - - - + + If all of the data for every entry has a time value (i.e., the time field of its LogEntry is not `None`), + then the first three rows are time indices for the run, trial and time_step of each data item, respectively. + Each subsequent row is the times series of data for a given entry. If there is no data for a given entry + at a given time point, it is entered as `None`. + + If any of the data for any entry does not have a time value (e.g., if that Component was not run within a + System), then all of the entries must have the same number of data (LogEntry) items, and the first row is a + sequential index (starting with 0) that simply designates the data item number. + + .. note:: + For data without time stamps, the nth items in each entry correspond (i.e., ones in the same column) + are not guaranteed to have been logged at the same time point. + + If header is `True`, the first item of each row is a header field: for time indices it is either "Run", + "Trial", and "Time_step", or "Index" if any data are missing time stamps. For subsequent rows it is the name + of the Component logged in that entry (see **owner_name** argument below for formatting). + + + Arguments + --------- + + entries : string, Component or list containing either : default ALL + specifies the entries of the Log to be included in the output; they must be `loggable_items + ` of the Log that have been logged (i.e., are also `logged_items `). + If **entries** is *ALL* or is not specified, then all `logged_items ` are included. + + COMMENT: + time : TimeScale or ALL : default ALL + specifies the "granularity" of how the time of an entry is reported. *ALL* (same as `TIME_STEP + ) reports every entry in the Log in a separate column (axis 1) of the np.array + returned. + COMMENT + + header : bool : default True + specifies whether or not to include a header in each row with the name of the Component for that entry. + + owner_name : bool : default False + specifies whether or not to include the Log's `owner ` in the header of each field; + if it is True, the format of the header for each field is "[]"; + otherwise, it is "". 
+ + Returns: + 2d np.array + """ + + entries = self._validate_entries_arg(entries, logged=True) + + if not entries: + return None + + if owner_name is True: + owner_name_str = self.owner.name + lb = "[" + rb = "]" + else: + owner_name_str = lb = rb = "" + + # Get time values for all entries and sort them + time_values = [] + for entry in entries: + entry = self._dealias_owner_name(entry) + time_values.extend([item.time + for item in self.logged_entries[entry] + if all(i is not None for i in item.time)]) + # Insure that all time values are assigned, get rid of duplicates, and sort + if all(all(i is not None for i in t) for t in time_values): + time_values = sorted(list(set(time_values))) + npa = [] + + # Create time rows (one for each time scale) + if time_values: + for i in range(NUM_TIME_SCALES): + row = [[t[i]] for t in time_values] + time_header = [TIME_SCALE_NAMES[i].capitalize()] + row = [time_header] + row + npa.append(row) + # If any time values are empty, revert to indexing the entries; + # this requires that all entries have the same length + else: + max_len = max([len(self.logged_entries[self._dealias_owner_name(e)]) for e in entries]) + + # If there are no time values, only support entries of the same length + # Must dealias both e and zeroth entry because either/both of these could be 'value' + if not all(len(self.logged_entries[self._dealias_owner_name(e)])==len(self.logged_entries[self._dealias_owner_name(entries[0])])for e in entries): + raise LogError("nparray output requires that all entries have time values or are of equal length") + + npa = np.arange(max_len).reshape(max_len,1).tolist() + + + + # For each entry, iterate through its LogEntry tuples: + # for each LogEntry tuple, check whether its time matches that of the next column: + # if so, enter it in the entry's list + # if not, enter `None` and check for a match in the next time column + for entry in entries: + entry = self._dealias_owner_name(entry) + row = [] + time_col = iter(time_values) + for datum in self.logged_entries[entry]: + if time_values: + # time_col = iter(time_values) + # # MODIFIED 12/14/17 OLD: + # while datum.time != next(time_col,None): + # row.append(None) + # value = None if datum.value is None else np.array(datum.value).tolist() + # row.append(value) + # MODIFIED 12/14/17 NEW: + for i in range(len(time_values)): + time = next(time_col,None) + if time is None: + break + if datum.time != time: + row.append(None) + continue + value = None if datum.value is None else np.array(datum.value).tolist() + row.append(value) + break + else: + value = None if datum.value is None else datum.value.tolist() + row.append(value) + # MODIFIED 12/14/17 END + + if header: + entry_header = "{}{}{}{}".format(owner_name_str, lb, self._alias_owner_name(entry), rb) + row = [entry_header] + row + npa.append(row) + + npa = np.array(npa, dtype=object) + + return npa @tc.typecheck def csv(self, entries=None, owner_name:bool=False, quotes:tc.optional(tc.any(bool, str))="\'"): """ From 7c6e82d221c52c1671f07e78ea216ef86dfd710b Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 7 Feb 2018 16:47:58 -0500 Subject: [PATCH 08/17] refactoring log to have a dictionary option and writing tests for it --- psyneulink/globals/log.py | 110 ++++----- tests/learning/Stroop-Model-No-Learning.pdf | Bin 0 -> 17487 bytes tests/learning/Stroop-Model.pdf | Bin 0 -> 21562 bytes tests/learning/System-0.pdf | Bin 0 -> 21809 bytes tests/log/test_log.py | 240 ++++++++++++++++++++ 5 files changed, 288 insertions(+), 62 deletions(-) create mode 
100644 tests/learning/Stroop-Model-No-Learning.pdf create mode 100644 tests/learning/Stroop-Model.pdf create mode 100644 tests/learning/System-0.pdf diff --git a/psyneulink/globals/log.py b/psyneulink/globals/log.py index 13b72d48bbe..eb805c99037 100644 --- a/psyneulink/globals/log.py +++ b/psyneulink/globals/log.py @@ -383,7 +383,7 @@ import warnings import inspect import typecheck as tc -from collections import namedtuple +from collections import namedtuple, OrderedDict # from enum import IntEnum, unique, auto from enum import IntEnum, unique @@ -1364,36 +1364,32 @@ def nparray(self, def nparray_dictionary(self, entries=None, - owner_name:bool=False): + # owner_name:bool=False + ): """ nparray_dictionary( \ entries=None, \ - owner_name=False): \ ) - Returns a Python ordered dictionary. Keys are logged items, ordered in the same order as they are specified in - the **entries** argument, plus Time. Values are numpy arrays in which each item is the data for that time point of the - component specified in the Key. + Returns a Python ordered dictionary. + + Keys are logged items, ordered in the same order as they are specified in the **entries** argument. Time/index + is the recorded in the first three or one key(s). - - - - - example - - - - + Values are numpy arrays of data generated by the logged Component (specified in key). Each item in the array + corresponds to a time point (or index). If all of the data for every entry has a time value (i.e., the time field of its LogEntry is not `None`), - then the first three rows are time indices for the run, trial and time_step of each data item, respectively. - Each subsequent row is the times series of data for a given entry. If there is no data for a given entry - at a given time point, it is entered as `None`. + then the first three keys are time indices for the run, trial and time_step of each data item, respectively. + If there is no data for a given entry at a given time point, it is entered as `None`. If any of the data for any entry does not have a time value (e.g., if that Component was not run within a - System), then all of the entries must have the same number of data (LogEntry) items, and the first row is a + System), then all of the entries must have the same number of data (LogEntry) items, and the first key is a sequential index (starting with 0) that simply designates the data item number. .. note:: - For data without time stamps, the nth items in each entry correspond (i.e., ones in the same column) - are not guaranteed to have been logged at the same time point. - - If header is `True`, the first item of each row is a header field: for time indices it is either "Run", - "Trial", and "Time_step", or "Index" if any data are missing time stamps. For subsequent rows it is the name - of the Component logged in that entry (see **owner_name** argument below for formatting). - + For data without time stamps, the nth item in each dictionary key (i.e., data in the same "column") + is not guaranteed to have been logged at the same time point across all keys (Components). Arguments --------- @@ -1410,13 +1406,12 @@ def nparray_dictionary(self, returned. COMMENT - header : bool : default True - specifies whether or not to include a header in each row with the name of the Component for that entry. 
- + COMMENT: owner_name : bool : default False - specifies whether or not to include the Log's `owner ` in the header of each field; - if it is True, the format of the header for each field is "[]"; + specifies whether or not to include the Log's `owner ` in the dictionary key of each entry; + if it is True, the format of the key for each field is "[]"; otherwise, it is "". + COMMENT Returns: 2d np.array @@ -1427,12 +1422,12 @@ def nparray_dictionary(self, if not entries: return None - if owner_name is True: - owner_name_str = self.owner.name - lb = "[" - rb = "]" - else: - owner_name_str = lb = rb = "" + # if owner_name is True: + # owner_name_str = self.owner.name + # lb = "[" + # rb = "]" + # else: + # owner_name_str = lb = rb = "" # Get time values for all entries and sort them time_values = [] @@ -1444,48 +1439,46 @@ def nparray_dictionary(self, # Insure that all time values are assigned, get rid of duplicates, and sort if all(all(i is not None for i in t) for t in time_values): time_values = sorted(list(set(time_values))) - npa = [] - # Create time rows (one for each time scale) + log_dict = OrderedDict() + + # Initialize log_dict with time/index arrays + + # If all time values are recorded: + # log_dict = {"Run": array, "Trial": array, "Time_step": array} if time_values: for i in range(NUM_TIME_SCALES): row = [[t[i]] for t in time_values] - time_header = [TIME_SCALE_NAMES[i].capitalize()] - row = [time_header] + row - npa.append(row) - # If any time values are empty, revert to indexing the entries; - # this requires that all entries have the same length + time_header = TIME_SCALE_NAMES[i].capitalize() + log_dict[time_header] = row + + # If ANY time values are empty (components were run outside of a System) + # log_dict = {"Index": array} else: - max_len = max([len(self.logged_entries[self._dealias_owner_name(e)]) for e in entries]) + # find number of values logged by zeroth component + num_indicies = len(self.logged_entries[self._dealias_owner_name(entries[0])]) # If there are no time values, only support entries of the same length # Must dealias both e and zeroth entry because either/both of these could be 'value' - if not all(len(self.logged_entries[self._dealias_owner_name(e)])==len(self.logged_entries[self._dealias_owner_name(entries[0])])for e in entries): + if not all(len(self.logged_entries[self._dealias_owner_name(e)]) == num_indicies for e in entries): raise LogError("nparray output requires that all entries have time values or are of equal length") - npa = np.arange(max_len).reshape(max_len,1).tolist() - - + log_dict["Index"] = np.arange(num_indicies).reshape(num_indicies, 1).tolist() - # For each entry, iterate through its LogEntry tuples: - # for each LogEntry tuple, check whether its time matches that of the next column: - # if so, enter it in the entry's list - # if not, enter `None` and check for a match in the next time column + # iterate through its LogEntry tuples: for entry in entries: entry = self._dealias_owner_name(entry) row = [] time_col = iter(time_values) for datum in self.logged_entries[entry]: + # iterate through log entry tuples: + # check whether tuple's time value matches the time for which data is currently being recorded + # if so, enter tuple's Component value in the entry's list + # if not, enter `None` in the entry's list + if time_values: - # time_col = iter(time_values) - # # MODIFIED 12/14/17 OLD: - # while datum.time != next(time_col,None): - # row.append(None) - # value = None if datum.value is None else np.array(datum.value).tolist() - # 
row.append(value) - # MODIFIED 12/14/17 NEW: for i in range(len(time_values)): - time = next(time_col,None) + time = next(time_col, None) if time is None: break if datum.time != time: @@ -1497,16 +1490,10 @@ def nparray_dictionary(self, else: value = None if datum.value is None else datum.value.tolist() row.append(value) - # MODIFIED 12/14/17 END - if header: - entry_header = "{}{}{}{}".format(owner_name_str, lb, self._alias_owner_name(entry), rb) - row = [entry_header] + row - npa.append(row) + log_dict[self._alias_owner_name(entry)] = np.array(row) - npa = np.array(npa, dtype=object) - - return npa + return log_dict @tc.typecheck def csv(self, entries=None, owner_name:bool=False, quotes:tc.optional(tc.any(bool, str))="\'"): """ @@ -1614,7 +1601,6 @@ def _alias_owner_name(self, name): """ return VALUE if name is self.owner.name else name - def _dealias_owner_name(self, name): """De-alias VALUE to name of owner """ diff --git a/tests/learning/Stroop-Model-No-Learning.pdf b/tests/learning/Stroop-Model-No-Learning.pdf new file mode 100644 index 0000000000000000000000000000000000000000..938ca632c05ca9bc42cb61cd99eea9e19a3e014d GIT binary patch literal 17487 zcmajH1wb6zvNnnn971r(;4Xu^ySux)ySoI3;1=8+f?IHRx8UyXkL^;b>RkjM#%&;V%}VM%&-uXc}%u76DS_QNs(=mE9{7O-4g06HmS8&fAU z0P_c=0H6~ww{kLe_}E+NI~fZZ8`>Hf19*5~9i1GE^{rvuK>JjsZE;wU+pkqeB)XxxWZt+ysB` zK$#JJc@wG{ca_>t-nq!m8*%i~=AHFzFXLU|l&}JqR8X*u*pWLJccOM1DajWjSp1#I z&}V>(Z!oP_M>^+k6`p5&i_Kw@u{$WNTujIOqhmV>YOuTids}2*hI2Rxl>7`U9;UUZ z9bM=B&;R=1h9X zPxe@!5>>Jnoc78mk1U@r_OZpmejJ{+Ce~N2S1jCQvG7hK;k3|`jUTvOZ*iBX#Yx39 z^4N~}=%j~Y+CVc1J9nm^K3KI7vi0r6;aw&DdL&;<|JhS?x7o}jUt-&kVG^DUh(|jo za{KWk!=xq8-?#yh%TQ+Cy*f_v=M-c5wAm8!1(7=rNziO|Zfv>gJ{NNx;#Z}WB;N2M>If3VEv1*u~Sa&g{|8l%4sL4)N&u`HV zsZg)ZG{3EX%z*vk7h1;3s;c^xr($exJ8N#jf!a0t(TF5)9r`$@zy>>0cf1C*LCI9n zi2&=9`e->yJy<_QZXzWX7=!Z9G_7$66&OpffFBtV0Bp{5LFCC{MshN2_=q~50Rf&o z^2%v>SfLSMu$f_)9DJ0DH;h{a<#>44kC-ri#zBR0i#h#ofJ#aOqpGC32`5R=4F?C~ z4S1Ohu-3u_G7M&Aw$gRMOYN~u{+gvBLBtqIu7qfDa+7Ib_|WT8`uukLaTWG7ub0{2 zlzzLmDi%uJQOu38k1d^cBa$>zc05%eflI6@tV*tEFl=-U`n!D^N+N?HS`rzGYP3av5*0@`G(8L8r6^b`UZI$3E`<^@=_^2YhcL61KF8Ci znuczbgs@|NBRAZO2NS3LX@)3T`XITjw7+&119jWQq3P;oVGzi^R0-Ycdii;f@QD?_ zfX9Bd{0Wf`FIMskSB4YCF2v_Mdx*JOWSA@hWM(zSOaeKLKBHhHjY(p9K4y24?mPlM z%RNm$Q_qtm31w<7)5851)HJ8XG9ta8?`;xJxiG;DIL&UcK4ywzv@z=yl> z4ow$>(Q*zl4lo}*C>7Il&JdUZPfK1eZ_%{Q{_i7*;isL&7)@A>QcGTq5L@VCW(Lk6o3xJB^WXh4e1P5n~AfN1;n zBzm@?fOM+!I1V%Yi(X>TJU8dnh0#s>84Ab)K1c9c`Z!QFmx%# zF}KBa@@Ju6lDOtq8cN3!LLvYO0jz&D&%^U!;6Eh(1uH250c?NZkJkT~`hNtl{|^EG+#-N~boo$D=A#>Q{D1Bf zHXk<$0G*Jri@Bk(f~dg%{q~WWS&^-B>uDsLd65t~S}7#IP4m2^T7B z3KFD#J>wcb#7ZZ=cMF~jF5=|G$`Lf%l9uX#^*KAhcDOTy{XTW%>{*4g5QEpBNF0sP zSa`Rvs8CX9bQB&jdBW)R9{GK{i`{%|sE0Zuy9?to_&{0y;5$rExLIp3m%myptr2XH z@7$&@CJPxfmQSYT7c*yF5bt1ixh8SwL0fJRK_YNs8(t4E?_*75AnagAn^{F%m17Kp z$J_qXS2(S2Hk(rXgo=B2@G(XXB)QaBc^g4i-C&<%p`CufGWpZegHh`t;QBGzfv*EV zg8amg!IQvg#lXz@AZqz}7C~V7fs3FnffjZkv;H2F5HbNQc2HUns$B?naPWSje6S(i zlJubK{^p+oE|Af3`AcKb^N~^o=8%dvd{?-LAK%lV0yd+Mun6+gZ>gFS_mk+ak>5gYXc^ELT`QXsP3Sy` zjlVE9aV|ipgLQu;eBvAY1U@CA_6g@wNFY{?7!-xTGLA4GtVB317BL^EIZ7NGOZ5 zOgv8ZB$55TknEbQOPfumOW-T~Xut&$_MS^tYWSX?+QH_;%>Ak84$ zAaIMhPoAVL-ao$avup|9Vf8dg0RgtaOo97!Q@wOO$FlJvlwOI1 zi6)7li7};8%C>V~<}ypIOJ!&2El4f&EtV{v=V;5KCWCIQQO5&Y@h#W~+{I`y;k^EM5MO5@6=DDyP(IQ!WQf7*Y$a^3j_{fE83@fC>K59Lc)#f6Lf#tWJ%@69u-Pt>fTtGtr9t#SM`D!mZ3@6v1CySB)^^xE`H zg(Qg-CmJ#Ey(nQHdjd`;EV)WGPPM8r{@&o+;=G85gD0FPiU+mBrK8N7?m_<*?A7II z?Rw#2|7qb_7J?0;9OeKvA3_8|1)2wr0p>HLGel1}Xm@MCSGs9kVy#(pFo73AmtZa! zW_WG1Cr3LD7gPbV+nqks;AO;W<%hVNc;? 
[base85-encoded GIT binary patch data for tests/learning/Stroop-Model-No-Learning.pdf, tests/learning/Stroop-Model.pdf and tests/learning/System-0.pdf omitted]
zW#%m6`++OiPWyH(lNLaYj?Kv1%NJ!vQ-o8p6BG;t_qF?pr+N5fn7U%d_?6b(k4jnO z!c^xzneSab1hM!%e_^JW(RwVVdv5iTTJGp45BB5;pz25fE?@-krkLKfykZPKF}d)- z1@5&Zyo^f1UK%`MZS!nNYF`yNG3+7ibQ1@?HO^hSCO(QIA2!8Nz# zZ5`{PTXKuQxQR^zTgh(Y!JJJza{1u?HC=o{aD<8`OANU~Vqw3X%Xy2cFL*NL0w&>n(G!<$7ciY952x)PfTsL3vDz{?K_H82l-VR zDc}dr1oW(i>%;@%gNGzaCR}6ZIU>16abk(;;MhpP%&$I2*RN3c9m}(e z^`1JK@L~45D9uKcU55@_0kxMJ9)K29X8CxD!dk~Gb8+0<%q_T{bhn-{Q6j^4Pipx8 zfN%ejJwh%+k^e{ELjF?WhzP?(|Ke>)xZ*F8QiOz~@@whv>yzL|5Cj53@N2u7xnR8c zNw%7w;u}xQ-$j3cydpL(W=^*5cHaC@5KIyT{yXC@gjPgF3JQiuf}vmp1P+(H3^oLV zFOv4ZGyj|9w8M}{%b$Ic3^pWVj-{Ig$xQjPM^ZqUB%~yvwEU1?f}CGtJ4XkVAA%dm zPdyF^pmenSF{b%*{5LP@>0)I~0#qR=8WQ38=f;nK!(niKYyMv`CjXHpDCT8H0bsNe6G^h}}ubv=xW z83|`~`Kck*vGXRa8wrNJ<>*L4xJjM-5Eor_uy!Qz+`rCMNJO^`$*P1xLd>kJ!EhK1 wYJr4USt3woq(&uS5QrrNB}eoBr~JZ)T`{Cr`sa&4!gi4m8ew5oEj60|02%ncIsgCw literal 0 HcmV?d00001 diff --git a/tests/log/test_log.py b/tests/log/test_log.py index 8ced5b15e24..139075dade4 100644 --- a/tests/log/test_log.py +++ b/tests/log/test_log.py @@ -103,7 +103,247 @@ def test_log_initialization(self): ) assert T.logged_items == {'value': 'INITIALIZATION'} + def test_log_dictionary_without_time(self): + T1 = pnl.TransferMechanism(name='T1', + size=2) + T2 = pnl.TransferMechanism(name='T2', + size=2) + PS = pnl.Process(name='PS', pathway=[T1, T2]) + PJ = T2.path_afferents[0] + assert T1.loggable_items == {'InputState-0': 'OFF', + 'slope': 'OFF', + 'RESULTS': 'OFF', + 'intercept': 'OFF', + 'noise': 'OFF', + 'smoothing_factor': 'OFF', + 'value': 'OFF'} + assert T2.loggable_items == {'InputState-0': 'OFF', + 'slope': 'OFF', + 'RESULTS': 'OFF', + 'intercept': 'OFF', + 'noise': 'OFF', + 'smoothing_factor': 'OFF', + 'value': 'OFF'} + assert PJ.loggable_items == {'matrix': 'OFF', + 'value': 'OFF'} + + T1.set_log_conditions(pnl.SLOPE) + T1.set_log_conditions(pnl.RESULTS) + T1.set_log_conditions(pnl.VALUE) + PJ.set_log_conditions(pnl.MATRIX) + T2.set_log_conditions(pnl.SLOPE) + T2.set_log_conditions(pnl.RESULTS) + T2.set_log_conditions(pnl.VALUE) + + assert T1.loggable_items == {'InputState-0': 'OFF', + 'slope': 'EXECUTION', + 'RESULTS': 'EXECUTION', + 'intercept': 'OFF', + 'noise': 'OFF', + 'smoothing_factor': 'OFF', + 'value': 'EXECUTION'} + assert T2.loggable_items == {'InputState-0': 'OFF', + 'slope': 'EXECUTION', + 'RESULTS': 'EXECUTION', + 'intercept': 'OFF', + 'noise': 'OFF', + 'smoothing_factor': 'OFF', + 'value': 'EXECUTION'} + assert PJ.loggable_items == {'matrix': 'EXECUTION', + 'value': 'OFF'} + + PS.execute([1.0, 2.0]) + PS.execute([3.0, 4.0]) + PS.execute([5.0, 6.0]) + + assert T1.logged_items == {'RESULTS': 'EXECUTION', + 'slope': 'EXECUTION', + 'value': 'EXECUTION'} + assert T2.logged_items == {'RESULTS': 'EXECUTION', + 'slope': 'EXECUTION', + 'value': 'EXECUTION'} + assert PJ.logged_items == {'matrix': 'EXECUTION'} + + log_dict_T1 = T1.log.nparray_dictionary(entries=['value', 'slope', 'RESULTS']) + + expected_values_T1 = [[[1.0, 2.0]], [[3.0, 4.0]], [[5.0, 6.0]]] + expected_slopes_T1 = [[1.0], [1.0], [1.0]] + expected_results_T1 = [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]] + + assert np.allclose(expected_values_T1, log_dict_T1['value']) + assert np.allclose(expected_slopes_T1, log_dict_T1['slope']) + assert np.allclose(expected_results_T1, log_dict_T1['RESULTS']) + + assert list(log_dict_T1.keys()) == ['Index', 'value', 'slope', 'RESULTS'] + + log_dict_T1_reorder = T1.log.nparray_dictionary(entries=['slope', 'value', 'RESULTS']) + + assert list(log_dict_T1_reorder.keys()) == ['Index', 'slope', 'value', 'RESULTS'] + + def 
test_log_dictionary_with_time(self): + + T1 = pnl.TransferMechanism(name='T1', + size=2) + T2 = pnl.TransferMechanism(name='T2', + function=pnl.Linear(slope=2.0), + size=2) + PS = pnl.Process(name='PS', pathway=[T1, T2]) + SYS = pnl.System(name='SYS', processes=[PS]) + + assert T1.loggable_items == {'InputState-0': 'OFF', + 'slope': 'OFF', + 'RESULTS': 'OFF', + 'intercept': 'OFF', + 'noise': 'OFF', + 'smoothing_factor': 'OFF', + 'value': 'OFF'} + assert T2.loggable_items == {'InputState-0': 'OFF', + 'slope': 'OFF', + 'RESULTS': 'OFF', + 'intercept': 'OFF', + 'noise': 'OFF', + 'smoothing_factor': 'OFF', + 'value': 'OFF'} + + T1.set_log_conditions(pnl.SLOPE) + T1.set_log_conditions(pnl.RESULTS) + T1.set_log_conditions(pnl.VALUE) + + assert T1.loggable_items == {'InputState-0': 'OFF', + 'slope': 'EXECUTION', + 'RESULTS': 'EXECUTION', + 'intercept': 'OFF', + 'noise': 'OFF', + 'smoothing_factor': 'OFF', + 'value': 'EXECUTION'} + + T2.set_log_conditions(pnl.SLOPE) + T2.set_log_conditions(pnl.RESULTS) + T2.set_log_conditions(pnl.VALUE) + + assert T2.loggable_items == {'InputState-0': 'OFF', + 'slope': 'EXECUTION', + 'RESULTS': 'EXECUTION', + 'intercept': 'OFF', + 'noise': 'OFF', + 'smoothing_factor': 'OFF', + 'value': 'EXECUTION'} + + # RUN ZERO | TRIALS ZERO, ONE, TWO ---------------------------------- + + SYS.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}) + + assert T1.logged_items == {'RESULTS': 'EXECUTION', + 'slope': 'EXECUTION', + 'value': 'EXECUTION'} + assert T2.logged_items == {'RESULTS': 'EXECUTION', + 'slope': 'EXECUTION', + 'value': 'EXECUTION'} + + # T1 log after zero-th run ------------------------------------------- + + log_dict_T1 = T1.log.nparray_dictionary(entries=['value', 'slope', 'RESULTS']) + + expected_run_T1 = [[0], [0], [0]] + expected_trial_T1 = [[0], [1], [2]] + expected_time_step_T1 = [[0], [0], [0]] + expected_values_T1 = [[[1.0, 2.0]], [[3.0, 4.0]], [[5.0, 6.0]]] + expected_slopes_T1 = [[1.0], [1.0], [1.0]] + expected_results_T1 = [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]] + + assert np.allclose(expected_run_T1, log_dict_T1['Run']) + assert np.allclose(expected_trial_T1, log_dict_T1['Trial']) + assert np.allclose(expected_time_step_T1, log_dict_T1['Time_step']) + assert np.allclose(expected_values_T1, log_dict_T1['value']) + assert np.allclose(expected_slopes_T1, log_dict_T1['slope']) + assert np.allclose(expected_results_T1, log_dict_T1['RESULTS']) + + # T2 log after zero-th run -------------------------------------------- + + log_dict_T2 = T2.log.nparray_dictionary(entries=['value', 'slope', 'RESULTS']) + + expected_run_T2 = [[0], [0], [0]] + expected_trial_T2 = [[0], [1], [2]] + expected_time_step_T2 = [[1], [1], [1]] + expected_values_T2 = [[[2.0, 4.0]], [[6.0, 8.0]], [[10.0, 12.0]]] + expected_slopes_T2 = [[2.0], [2.0], [2.0]] + expected_results_T2 = [[2.0, 4.0], [6.0, 8.0], [10.0, 12.0]] + + assert np.allclose(expected_run_T2, log_dict_T2['Run']) + assert np.allclose(expected_trial_T2, log_dict_T2['Trial']) + assert np.allclose(expected_time_step_T2, log_dict_T2['Time_step']) + assert np.allclose(expected_values_T2, log_dict_T2['value']) + assert np.allclose(expected_slopes_T2, log_dict_T2['slope']) + assert np.allclose(expected_results_T2, log_dict_T2['RESULTS']) + + # RUN ONE | TRIALS ZERO, ONE, TWO ------------------------------------- + + SYS.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}) + + # T1 log after first run ------------------------------------------- + + log_dict_T1 = T1.log.nparray_dictionary(entries=['value', 'slope', 'RESULTS']) + + # 
expected_run_T1_2 = [[1], [1], [1]] + expected_run_T1_2 = [[0], [0], [0]] + expected_run_T1 + expected_trial_T1_2 = [[0], [1], [2]] + expected_trial_T1 + expected_time_step_T1_2 = [[0], [0], [0]] + expected_time_step_T1 + expected_values_T1_2 = expected_values_T1 + expected_values_T1 + expected_slopes_T1_2 = expected_slopes_T1 + expected_slopes_T1 + expected_results_T1_2 = expected_results_T1 + expected_results_T1 + + # assert np.allclose(expected_run_T1_2, log_dict_T1['Run']) + # assert np.allclose(expected_trial_T1_2, log_dict_T1['Trial']) + # assert np.allclose(expected_time_step_T1_2, log_dict_T1['Time_step']) + assert np.allclose(expected_values_T1_2, log_dict_T1['value']) + assert np.allclose(expected_slopes_T1_2, log_dict_T1['slope']) + assert np.allclose(expected_results_T1_2, log_dict_T1['RESULTS']) + + # T2 log after first run ------------------------------------------- + + log_dict_T2_2 = T2.log.nparray_dictionary(entries=['value', 'slope', 'RESULTS']) + + expected_run_T2_2 = [[0], [0], [0]] + expected_run_T2 + expected_trial_T2_2 = [[0], [1], [2]] + expected_trial_T2 + expected_time_step_T2_2 = [[1], [1], [1]] + expected_time_step_T2 + expected_values_T2_2 = [[[2.0, 4.0]], [[6.0, 8.0]], [[10.0, 12.0]]] + expected_values_T2 + expected_slopes_T2_2 = [[2.0], [2.0], [2.0]] + expected_slopes_T2 + expected_results_T2_2 = [[2.0, 4.0], [6.0, 8.0], [10.0, 12.0]] + expected_results_T2 + + # assert np.allclose(expected_run_T2_2, log_dict_T2_2['Run']) + # assert np.allclose(expected_trial_T2_2, log_dict_T2_2['Trial']) + # assert np.allclose(expected_time_step_T2_2, log_dict_T2_2['Time_step']) + assert np.allclose(expected_values_T2_2, log_dict_T2_2['value']) + assert np.allclose(expected_slopes_T2_2, log_dict_T2_2['slope']) + assert np.allclose(expected_results_T2_2, log_dict_T2_2['RESULTS']) + + def test_log_dictionary_with_scheduler(self): + T1 = pnl.TransferMechanism(name='T1', + integrator_mode=True, + smoothing_factor=0.5) + T2 = pnl.TransferMechanism(name='T2', + function=pnl.Linear(slope=6.0)) + PS = pnl.Process(name='PS', pathway=[T1, T2]) + SYS = pnl.System(name='SYS', processes=[PS]) + + def pass_threshold(mech, thresh): + results = mech.output_states[0].value + for val in results: + if abs(val) >= thresh: + return True + return False + + terminate_trial = { + pnl.TimeScale.TRIAL: pnl.While(pass_threshold, T2, 5.0) + } + + T2.set_log_conditions(pnl.VALUE) + + SYS.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial) + log_dict_T2 = T2.log.nparray_dictionary(entries=['value']) + # from pprint import pprint + # pprint(log_dict_T2) From a351091ed8b3400b05f7ebc1661ab8b649435f92 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 8 Feb 2018 15:20:27 -0500 Subject: [PATCH 09/17] fixing (temporarily) time_step bug in nparray_dictionary; reorganzing nparray_dictionary into methods that can be reused by nparray --- psyneulink/globals/log.py | 125 +++++++++++++++++++++++--------------- tests/log/test_log.py | 73 ++++++++++++++++------ 2 files changed, 129 insertions(+), 69 deletions(-) diff --git a/psyneulink/globals/log.py b/psyneulink/globals/log.py index eb805c99037..82219e3a90e 100644 --- a/psyneulink/globals/log.py +++ b/psyneulink/globals/log.py @@ -1295,6 +1295,7 @@ def nparray(self, # Insure that all time values are assigned, get rid of duplicates, and sort if all(all(i is not None for i in t) for t in time_values): time_values = sorted(list(set(time_values))) + npa = [] # Create time rows (one for each time scale) @@ -1361,31 +1362,87 @@ def nparray(self, npa = 
np.array(npa, dtype=object) return npa + def _scan_for_duplicates(self, + time_values): + mod_time_values = sorted(list(time_values)) + time_step_increments = [] + chain = 0 + for t in range(1, len(time_values)): + if time_values[t] == time_values[t - 1]: + chain += 1 + else: + chain = 0 + time_step_increments.append(chain) + for i in range(1, len(time_values)): + update_tuple = list(time_values[i]) + update_tuple[2] += time_step_increments[i - 1] + mod_time_values[i] = tuple(update_tuple) + return mod_time_values + + def _parse_entries_for_time_values(self, entries): + time_values = [] + modified_time_values = [] + for entry in entries: + entry_time_values = [] + entry = self._dealias_owner_name(entry) + time_values.extend([item.time + for item in self.logged_entries[entry] + if all(i is not None for i in item.time)]) + entry_time_values.extend([item.time + for item in self.logged_entries[entry] + if all(i is not None for i in item.time)]) + if len(set(entry_time_values)) != len(entry_time_values): + modified_time_values.extend(self._scan_for_duplicates(entry_time_values)) + modified_time_values.extend(time_values) + + # Insure that all time values are assigned, get rid of duplicates, and sort + if all(all(i is not None for i in t) for t in time_values): + time_values = sorted(list(set(time_values))) + modified_time_values = sorted(list(set(modified_time_values))) + + return time_values, modified_time_values def nparray_dictionary(self, - entries=None, - # owner_name:bool=False - ): + entries=None): """ nparray_dictionary( \ entries=None, \ ) - Returns a Python ordered dictionary. + Returns an `OrderedDict `_ - Keys are logged items, ordered in the same order as they are specified in the **entries** argument. Time/index - is the recorded in the first three or one key(s). + Keys + ^^^^^ + Keys of the OrderedDict are strings. - Values are numpy arrays of data generated by the logged Component (specified in key). Each item in the array - corresponds to a time point (or index). + Keys are the names of logged Components specified in the **entries** argument, plus either Run, Trial, and + Time_step, or Index. If all of the data for every entry has a time value (i.e., the time field of its LogEntry is not `None`), - then the first three keys are time indices for the run, trial and time_step of each data item, respectively. - If there is no data for a given entry at a given time point, it is entered as `None`. + then the first three keys are Run, Trial and Time_step, respectively. If any of the data for any entry does not have a time value (e.g., if that Component was not run within a - System), then all of the entries must have the same number of data (LogEntry) items, and the first key is a - sequential index (starting with 0) that simply designates the data item number. + System), then all of the entries must have the same number of data (LogEntry) items, and the first key is Index. + + Then, the logged components follow in the same order as they were specified. + + Values + ^^^^^^^ + + Values of the OrderedDict are numpy arrays. + + The numpy array value for a given component key consists of that logged Component's data over many time points + or executions. + + The numpy array values for Run, Trial, and Time_step are counters for each of those time scales. The ith + elements of the Run, Trial, Time_step and component data arrays can be taken together to represent the value of + that component during a particular time step of a particular trial of a particular run. 
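A minimal sketch of reading these aligned arrays together (the mechanism and entry names here are only illustrative, and it assumes a dictionary returned with full time data, as described above)::

    log_dict = my_mechanism.log.nparray_dictionary(entries=['slope'])
    for i in range(len(log_dict['slope'])):
        # the ith element of each time array locates the ith logged value of 'slope'
        run, trial, time_step = log_dict['Run'][i], log_dict['Trial'][i], log_dict['Time_step'][i]
        slope_value = log_dict['slope'][i]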
+ + For example, if log_dict is a log dictionary in which log_dict['slope'][5] = 2.0, log_dict['Time_step'][5] = 1, + log_dict['Trial'][5] = 2, and log_dict['Run'][5] = 0, then the value of slope was 2.0 during the time step 1 of + trial 2 of run 0. If there is no data for a given entry at a given time point, it is entered as `None`. + + The numpy array value for Index is a sequential index starting at zero. .. note:: For data without time stamps, the nth item in each dictionary key (i.e., data in the same "column") @@ -1399,20 +1456,6 @@ def nparray_dictionary(self, ` of the Log that have been logged (i.e., are also `logged_items `). If **entries** is *ALL* or is not specified, then all `logged_items ` are included. - COMMENT: - time : TimeScale or ALL : default ALL - specifies the "granularity" of how the time of an entry is reported. *ALL* (same as `TIME_STEP - ) reports every entry in the Log in a separate column (axis 1) of the np.array - returned. - COMMENT - - COMMENT: - owner_name : bool : default False - specifies whether or not to include the Log's `owner ` in the dictionary key of each entry; - if it is True, the format of the key for each field is "[]"; - otherwise, it is "". - COMMENT - Returns: 2d np.array """ @@ -1422,38 +1465,20 @@ def nparray_dictionary(self, if not entries: return None - # if owner_name is True: - # owner_name_str = self.owner.name - # lb = "[" - # rb = "]" - # else: - # owner_name_str = lb = rb = "" - - # Get time values for all entries and sort them - time_values = [] - for entry in entries: - entry = self._dealias_owner_name(entry) - time_values.extend([item.time - for item in self.logged_entries[entry] - if all(i is not None for i in item.time)]) - # Insure that all time values are assigned, get rid of duplicates, and sort - if all(all(i is not None for i in t) for t in time_values): - time_values = sorted(list(set(time_values))) + time_values, modified_time_values = self._parse_entries_for_time_values(entries) log_dict = OrderedDict() - # Initialize log_dict with time/index arrays - - # If all time values are recorded: - # log_dict = {"Run": array, "Trial": array, "Time_step": array} + # If all time values are recorded - - - log_dict = {"Run": array, "Trial": array, "Time_step": array} if time_values: for i in range(NUM_TIME_SCALES): - row = [[t[i]] for t in time_values] + # modified_time_values is used for reporting b/c this list contains + # incremented time_step values in place of any duplicates caused by scheduler + row = [[t[i]] for t in modified_time_values] time_header = TIME_SCALE_NAMES[i].capitalize() log_dict[time_header] = row - # If ANY time values are empty (components were run outside of a System) - # log_dict = {"Index": array} + # If ANY time values are empty (components were run outside of a System) - - - log_dict = {"Index": array} else: # find number of values logged by zeroth component num_indicies = len(self.logged_entries[self._dealias_owner_name(entries[0])]) diff --git a/tests/log/test_log.py b/tests/log/test_log.py index 139075dade4..c4390fdbc7b 100644 --- a/tests/log/test_log.py +++ b/tests/log/test_log.py @@ -7,9 +7,9 @@ class TestLog: def test_log(self): - T_1 = pnl.TransferMechanism(name='T_1', size=2) - T_2 = pnl.TransferMechanism(name='T_2', size=2) - PS = pnl.Process(name='PS', pathway=[T_1, T_2]) + T_1 = pnl.TransferMechanism(name='log_test_T_1', size=2) + T_2 = pnl.TransferMechanism(name='log_test_T_2', size=2) + PS = pnl.Process(name='log_test_PS', pathway=[T_1, T_2]) PJ = T_2.path_afferents[0] assert 
T_1.loggable_items == {'InputState-0': 'OFF', @@ -86,7 +86,7 @@ def test_log(self): "\'Index\', \'noise\', \'RESULTS\'\n0, 0.0, 0.0 0.0\n1, 0.0, 0.0 0.0\n2, 0.0, 0.0 0.0\n" assert PJ.log.csv(entries='matrix', owner_name=True, quotes=True) == \ - "\'Index\', \'MappingProjection from T_1 to T_2[matrix]\'\n" \ + "\'Index\', \'MappingProjection from log_test_T_1 to log_test_T_2[matrix]\'\n" \ "\'0\', \'1.0 0.0\' \'0.0 1.0\'\n" \ "\'1\', \'1.0 0.0\' \'0.0 1.0\'\n" \ "\'2\', \'1.0 0.0\' \'0.0 1.0\'\n" @@ -105,11 +105,11 @@ def test_log_initialization(self): def test_log_dictionary_without_time(self): - T1 = pnl.TransferMechanism(name='T1', + T1 = pnl.TransferMechanism(name='log_test_T1', size=2) - T2 = pnl.TransferMechanism(name='T2', + T2 = pnl.TransferMechanism(name='log_test_T2', size=2) - PS = pnl.Process(name='PS', pathway=[T1, T2]) + PS = pnl.Process(name='log_test_PS', pathway=[T1, T2]) PJ = T2.path_afferents[0] assert T1.loggable_items == {'InputState-0': 'OFF', @@ -182,15 +182,41 @@ def test_log_dictionary_without_time(self): assert list(log_dict_T1_reorder.keys()) == ['Index', 'slope', 'value', 'RESULTS'] + def test_run_resets(self): + import psyneulink as pnl + T1 = pnl.TransferMechanism(name='log_test_T1', + size=2) + T2 = pnl.TransferMechanism(name='log_test_T2', + size=2) + PS = pnl.Process(name='log_test_PS', pathway=[T1, T2]) + SYS = pnl.System(name='log_test_SYS', processes=[PS]) + T1.set_log_conditions(pnl.SLOPE) + T2.set_log_conditions(pnl.SLOPE) + SYS.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}) + + log_array_T1 = T1.log.nparray() + log_array_T2 = T2.log.nparray() + + print(log_array_T1) + print(log_array_T2) + + SYS.run(inputs={T1: [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]}) + + log_array_T1_second_run = T1.log.nparray() + log_array_T2_second_run = T2.log.nparray() + + print(log_array_T1_second_run) + print(log_array_T2_second_run) + def test_log_dictionary_with_time(self): - T1 = pnl.TransferMechanism(name='T1', + T1 = pnl.TransferMechanism(name='log_test_T1', size=2) - T2 = pnl.TransferMechanism(name='T2', + T2 = pnl.TransferMechanism(name='log_test_T2', function=pnl.Linear(slope=2.0), size=2) - PS = pnl.Process(name='PS', pathway=[T1, T2]) - SYS = pnl.System(name='SYS', processes=[PS]) + PS = pnl.Process(name='log_test_PS', pathway=[T1, T2]) + SYS = pnl.System(name='log_test_SYS', processes=[PS]) assert T1.loggable_items == {'InputState-0': 'OFF', 'slope': 'OFF', @@ -311,7 +337,9 @@ def test_log_dictionary_with_time(self): expected_values_T2_2 = [[[2.0, 4.0]], [[6.0, 8.0]], [[10.0, 12.0]]] + expected_values_T2 expected_slopes_T2_2 = [[2.0], [2.0], [2.0]] + expected_slopes_T2 expected_results_T2_2 = [[2.0, 4.0], [6.0, 8.0], [10.0, 12.0]] + expected_results_T2 - + print("RUNS: ", log_dict_T2_2['Run']) + print("TRIALS: ", log_dict_T2_2['Trial']) + print("TIME_STEPS: ", log_dict_T2_2['Time_step']) # assert np.allclose(expected_run_T2_2, log_dict_T2_2['Run']) # assert np.allclose(expected_trial_T2_2, log_dict_T2_2['Trial']) # assert np.allclose(expected_time_step_T2_2, log_dict_T2_2['Time_step']) @@ -320,13 +348,13 @@ def test_log_dictionary_with_time(self): assert np.allclose(expected_results_T2_2, log_dict_T2_2['RESULTS']) def test_log_dictionary_with_scheduler(self): - T1 = pnl.TransferMechanism(name='T1', + T1 = pnl.TransferMechanism(name='log_test_T1', integrator_mode=True, smoothing_factor=0.5) - T2 = pnl.TransferMechanism(name='T2', + T2 = pnl.TransferMechanism(name='log_test_T2', function=pnl.Linear(slope=6.0)) - PS = pnl.Process(name='PS', pathway=[T1, T2]) - SYS 
= pnl.System(name='SYS', processes=[PS]) + PS = pnl.Process(name='log_test_PS', pathway=[T1, T2]) + SYS = pnl.System(name='log_test_SYS', processes=[PS]) def pass_threshold(mech, thresh): results = mech.output_states[0].value @@ -339,11 +367,18 @@ def pass_threshold(mech, thresh): pnl.TimeScale.TRIAL: pnl.While(pass_threshold, T2, 5.0) } + T1.set_log_conditions(pnl.VALUE) + T1.set_log_conditions(pnl.SLOPE) + T1.set_log_conditions(pnl.RESULTS) T2.set_log_conditions(pnl.VALUE) + T2.set_log_conditions(pnl.SLOPE) SYS.run(inputs={T1: [[1.0]]}, termination_processing=terminate_trial) - log_dict_T2 = T2.log.nparray_dictionary(entries=['value']) - # from pprint import pprint - # pprint(log_dict_T2) + log_dict_T1 = T1.log.nparray_dictionary(entries=['RESULTS', 'slope', 'value']) + log_dict_T2 = T2.log.nparray_dictionary(entries=['value', 'slope']) + + assert list(log_dict_T1.keys()) == ['Run', 'Trial', 'Time_step', 'RESULTS', 'slope', 'value'] + assert list(log_dict_T2.keys()) == ['Run', 'Trial', 'Time_step', 'value', 'slope'] + From 27bd71cec904fa51b5f93272108644818bcf4f40 Mon Sep 17 00:00:00 2001 From: kmantel Date: Wed, 7 Feb 2018 19:06:57 -0500 Subject: [PATCH 10/17] Time: fix bug where RUN counter would not be incremented on new runs --- psyneulink/globals/environment.py | 5 ++++- tests/learning/test_multilayer.py | 16 ++++++++-------- tests/scheduling/test_time.py | 14 ++++++++++++++ 3 files changed, 26 insertions(+), 9 deletions(-) diff --git a/psyneulink/globals/environment.py b/psyneulink/globals/environment.py index 6dc62dbe6b2..1e85e38af40 100644 --- a/psyneulink/globals/environment.py +++ b/psyneulink/globals/environment.py @@ -688,10 +688,13 @@ def run(object, context=context) try: - # this will fail on processes, which do not have schedulers object.scheduler_processing.date_last_run_end = datetime.datetime.now() object.scheduler_learning.date_last_run_end = datetime.datetime.now() + + for sched in [object.scheduler_processing, object.scheduler_learning]: + sched.clock._increment_time(TimeScale.RUN) except AttributeError: + # this will fail on processes, which do not have schedulers pass # Restore learning state diff --git a/tests/learning/test_multilayer.py b/tests/learning/test_multilayer.py index bcdbc606d36..739f4dff44f 100644 --- a/tests/learning/test_multilayer.py +++ b/tests/learning/test_multilayer.py @@ -260,12 +260,12 @@ def show_target(): Hidden_Layer_2.log.log_values(VALUE) log_val = Hidden_Layer_2.log.nparray(header=False) expected_log_val = np.array( - [ - [[0]], - [[10]], - [[0]], - [[[0.8565238418942037, 0.8601053239957609, 0.8662098921116546, 0.8746933736954071]]] - ], dtype=object + [ + [[1]], + [[0]], + [[0]], + [[[0.8565238418942037, 0.8601053239957609, 0.8662098921116546, 0.8746933736954071]]] + ], dtype=object ) for i in range(len(log_val)): try: @@ -289,8 +289,8 @@ def show_target(): log_val = Middle_Weights.log.nparray(entries='matrix', header=False) expected_log_val = np.array( [ - [[0], [0], [0], [0], [0]], - [[21], [23], [25], [27], [29]], + [[1], [1], [1], [1], [1]], + [[1], [3], [5], [7], [9]], [[3], [3], [3], [3], [3]], [ [[0.09925812411381937, 0.1079522130303428, 0.12252820028789306, 0.14345816973727732], [0.30131473371328343, 0.30827285172236585, 0.3213609999139731, 0.3410707131678078], diff --git a/tests/scheduling/test_time.py b/tests/scheduling/test_time.py index 91ba4939ec1..34bddf809bf 100644 --- a/tests/scheduling/test_time.py +++ b/tests/scheduling/test_time.py @@ -1,3 +1,4 @@ +import psyneulink as pnl import pytest from psyneulink.scheduling.time 
import Time, TimeHistoryTree, TimeScale @@ -17,6 +18,19 @@ def test_increment(self, base, increment_time_scale, expected): base._increment_by_time_scale(increment_time_scale) assert base == expected + def test_multiple_runs(self): + t1 = pnl.TransferMechanism() + t2 = pnl.TransferMechanism() + + p = pnl.Process(pathway=[t1, t2]) + s = pnl.System(processes=[p]) + + s.run(inputs={t1: [[1.0], [2.0], [3.0]]}) + assert s.scheduler_processing.clock.time == Time(run=1, trial=0, pass_=0, time_step=0) + + s.run(inputs={t1: [[4.0], [5.0], [6.0]]}) + assert s.scheduler_processing.clock.time == Time(run=2, trial=0, pass_=0, time_step=0) + class TestTimeHistoryTree: def test_defaults(self): From efd8a092ce67740446c708c4d1261516f3599c0f Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 8 Feb 2018 16:00:19 -0500 Subject: [PATCH 11/17] adding nparray_dictionary to docs and revising nparray to use the same helper methods as nparray_dictionary --- psyneulink/globals/log.py | 234 +++++++++++++++++--------------------- 1 file changed, 105 insertions(+), 129 deletions(-) diff --git a/psyneulink/globals/log.py b/psyneulink/globals/log.py index 82219e3a90e..99d0a4ecb87 100644 --- a/psyneulink/globals/log.py +++ b/psyneulink/globals/log.py @@ -26,7 +26,7 @@ ` method. Logging can be useful not only for observing the behavior of a Component in a model, but also in debugging the model during construction. The entries of a Log can be displayed in a "human readable" table using its `print_entries ` method, and returned in CSV and numpy array formats using its and `nparray -` and `csv ` methods. +`, `nparray_dictionary ` and `csv ` methods. .. _Log_Creation: @@ -71,6 +71,8 @@ .. * `nparray ` -- returns a 2d np.array with the `entries ` in the Log. .. + * `nparray_dictionary ` -- returns a dictionary of np.arrays with the `entries ` in the Log. + .. * `csv ` -- returns a CSV-formatted string with the `entries ` in the Log. .. 
_Log_Loggable_Items: @@ -1285,16 +1287,7 @@ def nparray(self, header = 1 if header is True else 0 - # Get time values for all entries and sort them - time_values = [] - for entry in entries: - entry = self._dealias_owner_name(entry) - time_values.extend([item.time - for item in self.logged_entries[entry] - if all(i is not None for i in item.time)]) - # Insure that all time values are assigned, get rid of duplicates, and sort - if all(all(i is not None for i in t) for t in time_values): - time_values = sorted(list(set(time_values))) + time_values, modified_time_values = self._parse_entries_for_time_values(entries) npa = [] @@ -1322,37 +1315,8 @@ def nparray(self, else: npa = [npa] - # For each entry, iterate through its LogEntry tuples: - # for each LogEntry tuple, check whether its time matches that of the next column: - # if so, enter it in the entry's list - # if not, enter `None` and check for a match in the next time column for entry in entries: - entry = self._dealias_owner_name(entry) - row = [] - time_col = iter(time_values) - for datum in self.logged_entries[entry]: - if time_values: - # time_col = iter(time_values) - # # MODIFIED 12/14/17 OLD: - # while datum.time != next(time_col,None): - # row.append(None) - # value = None if datum.value is None else np.array(datum.value).tolist() - # row.append(value) - # MODIFIED 12/14/17 NEW: - for i in range(len(time_values)): - time = next(time_col,None) - if time is None: - break - if datum.time != time: - row.append(None) - continue - value = None if datum.value is None else np.array(datum.value).tolist() - row.append(value) - break - else: - value = None if datum.value is None else datum.value.tolist() - row.append(value) - # MODIFIED 12/14/17 END + row = self._assemble_entry_data(entry, time_values) if header: entry_header = "{}{}{}{}".format(owner_name_str, lb, self._alias_owner_name(entry), rb) @@ -1362,45 +1326,6 @@ def nparray(self, npa = np.array(npa, dtype=object) return npa - def _scan_for_duplicates(self, - time_values): - mod_time_values = sorted(list(time_values)) - time_step_increments = [] - chain = 0 - for t in range(1, len(time_values)): - if time_values[t] == time_values[t - 1]: - chain += 1 - else: - chain = 0 - time_step_increments.append(chain) - for i in range(1, len(time_values)): - update_tuple = list(time_values[i]) - update_tuple[2] += time_step_increments[i - 1] - mod_time_values[i] = tuple(update_tuple) - return mod_time_values - - def _parse_entries_for_time_values(self, entries): - time_values = [] - modified_time_values = [] - for entry in entries: - entry_time_values = [] - entry = self._dealias_owner_name(entry) - time_values.extend([item.time - for item in self.logged_entries[entry] - if all(i is not None for i in item.time)]) - entry_time_values.extend([item.time - for item in self.logged_entries[entry] - if all(i is not None for i in item.time)]) - if len(set(entry_time_values)) != len(entry_time_values): - modified_time_values.extend(self._scan_for_duplicates(entry_time_values)) - modified_time_values.extend(time_values) - - # Insure that all time values are assigned, get rid of duplicates, and sort - if all(all(i is not None for i in t) for t in time_values): - time_values = sorted(list(set(time_values))) - modified_time_values = sorted(list(set(modified_time_values))) - - return time_values, modified_time_values def nparray_dictionary(self, entries=None): @@ -1411,43 +1336,43 @@ def nparray_dictionary(self, Returns an `OrderedDict `_ - Keys - ^^^^^ - Keys of the OrderedDict are strings. 
+ *Keys:* - Keys are the names of logged Components specified in the **entries** argument, plus either Run, Trial, and - Time_step, or Index. + Keys of the OrderedDict are strings. - If all of the data for every entry has a time value (i.e., the time field of its LogEntry is not `None`), - then the first three keys are Run, Trial and Time_step, respectively. + Keys are the names of logged Components specified in the **entries** argument, plus either Run, Trial, and + Time_step, or Index. - If any of the data for any entry does not have a time value (e.g., if that Component was not run within a - System), then all of the entries must have the same number of data (LogEntry) items, and the first key is Index. + If all of the data for every entry has a time value (i.e., the time field of its LogEntry is not `None`), + then the first three keys are Run, Trial and Time_step, respectively. + + If any of the data for any entry does not have a time value (e.g., if that Component was not run within a + System), then all of the entries must have the same number of data (LogEntry) items, and the first key is Index. - Then, the logged components follow in the same order as they were specified. + Then, the logged components follow in the same order as they were specified. - Values - ^^^^^^^ + *Values:* - Values of the OrderedDict are numpy arrays. + Values of the OrderedDict are numpy arrays. - The numpy array value for a given component key consists of that logged Component's data over many time points - or executions. + The numpy array value for a given component key consists of that logged Component's data over many time points + or executions. - The numpy array values for Run, Trial, and Time_step are counters for each of those time scales. The ith - elements of the Run, Trial, Time_step and component data arrays can be taken together to represent the value of - that component during a particular time step of a particular trial of a particular run. + The numpy array values for Run, Trial, and Time_step are counters for each of those time scales. The ith + elements of the Run, Trial, Time_step and component data arrays can be taken together to represent the value of + that component during a particular time step of a particular trial of a particular run. - For example, if log_dict is a log dictionary in which log_dict['slope'][5] = 2.0, log_dict['Time_step'][5] = 1, - log_dict['Trial'][5] = 2, and log_dict['Run'][5] = 0, then the value of slope was 2.0 during the time step 1 of - trial 2 of run 0. If there is no data for a given entry at a given time point, it is entered as `None`. + For example, if log_dict is a log dictionary in which log_dict['slope'][5] = 2.0, log_dict['Time_step'][5] = 1, + log_dict['Trial'][5] = 2, and log_dict['Run'][5] = 0, then the value of slope was 2.0 during the time step 1 of + trial 2 of run 0. If there is no data for a given entry at a given time point, it is entered as `None`. - The numpy array value for Index is a sequential index starting at zero. + The numpy array value for Index is a sequential index starting at zero. .. note:: For data without time stamps, the nth item in each dictionary key (i.e., data in the same "column") is not guaranteed to have been logged at the same time point across all keys (Components). 
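A short usage sketch, following the pattern used in the accompanying tests (the names here are illustrative)::

    import psyneulink as pnl

    T = pnl.TransferMechanism(name='log_demo_T', size=2)
    P = pnl.Process(name='log_demo_P', pathway=[T])
    S = pnl.System(name='log_demo_S', processes=[P])

    T.set_log_conditions(pnl.VALUE)                  # log T's value at each execution
    S.run(inputs={T: [[1.0, 2.0], [3.0, 4.0]]})      # two trials

    log_dict = T.log.nparray_dictionary(entries=['value'])
    # time keys come first, then the requested entries, in order
    assert list(log_dict.keys()) == ['Run', 'Trial', 'Time_step', 'value']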
+ Arguments --------- @@ -1484,41 +1409,16 @@ def nparray_dictionary(self, num_indicies = len(self.logged_entries[self._dealias_owner_name(entries[0])]) # If there are no time values, only support entries of the same length - # Must dealias both e and zeroth entry because either/both of these could be 'value' if not all(len(self.logged_entries[self._dealias_owner_name(e)]) == num_indicies for e in entries): raise LogError("nparray output requires that all entries have time values or are of equal length") log_dict["Index"] = np.arange(num_indicies).reshape(num_indicies, 1).tolist() - # iterate through its LogEntry tuples: for entry in entries: - entry = self._dealias_owner_name(entry) - row = [] - time_col = iter(time_values) - for datum in self.logged_entries[entry]: - # iterate through log entry tuples: - # check whether tuple's time value matches the time for which data is currently being recorded - # if so, enter tuple's Component value in the entry's list - # if not, enter `None` in the entry's list - - if time_values: - for i in range(len(time_values)): - time = next(time_col, None) - if time is None: - break - if datum.time != time: - row.append(None) - continue - value = None if datum.value is None else np.array(datum.value).tolist() - row.append(value) - break - else: - value = None if datum.value is None else datum.value.tolist() - row.append(value) - - log_dict[self._alias_owner_name(entry)] = np.array(row) + log_dict[self._alias_owner_name(entry)] = np.array(self._assemble_entry_data(entry, time_values)) return log_dict + @tc.typecheck def csv(self, entries=None, owner_name:bool=False, quotes:tc.optional(tc.any(bool, str))="\'"): """ @@ -1631,6 +1531,82 @@ def _dealias_owner_name(self, name): """ return self.owner.name if name is VALUE else name + def _scan_for_duplicates(self, time_values): + # TEMPORARY FIX: this is slow and may not cover all possible cases properly! 
+ # TBI: fix permanently in Time/SimpleTime + # In the case where scheduling leads to duplicate SimpleTime tuples (since Pass is ignored) + # _scan_for_duplicates() will increment the Time_step (index 2) value of the tuple + + mod_time_values = sorted(list(time_values)) + time_step_increments = [] + chain = 0 + for t in range(1, len(time_values)): + if time_values[t] == time_values[t - 1]: + chain += 1 + else: + chain = 0 + time_step_increments.append(chain) + for i in range(1, len(time_values)): + update_tuple = list(time_values[i]) + update_tuple[2] += time_step_increments[i - 1] + mod_time_values[i] = tuple(update_tuple) + return mod_time_values + + def _parse_entries_for_time_values(self, entries): + # Returns sorted list of SimpleTime tuples for all time points at which these entries logged values + # Also returns modified_time_values which removes duplicates (see _scan_for_duplicates) + + time_values = [] + modified_time_values = [] + for entry in entries: + entry_time_values = [] + entry = self._dealias_owner_name(entry) + time_values.extend([item.time + for item in self.logged_entries[entry] + if all(i is not None for i in item.time)]) + entry_time_values.extend([item.time + for item in self.logged_entries[entry] + if all(i is not None for i in item.time)]) + if len(set(entry_time_values)) != len(entry_time_values): + modified_time_values.extend(self._scan_for_duplicates(entry_time_values)) + modified_time_values.extend(time_values) + + # Insure that all time values are assigned, get rid of duplicates, and sort + if all(all(i is not None for i in t) for t in time_values): + time_values = sorted(list(set(time_values))) + modified_time_values = sorted(list(set(modified_time_values))) + + return time_values, modified_time_values + + def _assemble_entry_data(self, entry, time_values): + # Assembles list of entry's (component's) value at each of the time points specified in time_values + # If data was not recorded for this entry (component) for a given time point, it will be stored as None + + entry = self._dealias_owner_name(entry) + row = [] + time_col = iter(time_values) + for datum in self.logged_entries[entry]: + # iterate through log entry tuples: + # check whether tuple's time value matches the time for which data is currently being recorded + # if so, enter tuple's Component value in the entry's list + # if not, enter `None` in the entry's list + + if time_values: + for i in range(len(time_values)): + time = next(time_col, None) + if time is None: + break + if datum.time != time: + row.append(None) + continue + value = None if datum.value is None else np.array(datum.value).tolist() + row.append(value) + break + else: + value = None if datum.value is None else datum.value.tolist() + row.append(value) + return row + @property def loggable_items(self): """Return dict of loggable items. 
From fd18a4254066b55901277b8588f54fa895cbd0a7 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 8 Feb 2018 17:32:08 -0500 Subject: [PATCH 12/17] continuing to add tests to highlight differences between calling reinitialize on a mechanisms and on a function --- tests/mechanisms/test_transfer_mechanism.py | 42 +++++++++++++++------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/tests/mechanisms/test_transfer_mechanism.py b/tests/mechanisms/test_transfer_mechanism.py index c6f5fdeed10..33921c528c0 100644 --- a/tests/mechanisms/test_transfer_mechanism.py +++ b/tests/mechanisms/test_transfer_mechanism.py @@ -864,7 +864,7 @@ def test_previous_value_persistence_run(self): assert np.allclose(T.initial_value, 0.5) assert np.allclose(T.integrator_function.initializer, 0.5) - def test_previous_value_reset_initializer_execute(self): + def test_previous_value_reinitialize_execute(self): T = TransferMechanism(name="T", initial_value=0.5, integrator_mode=True, @@ -880,22 +880,40 @@ def test_previous_value_reset_initializer_execute(self): assert np.allclose(T.previous_value, 0.55) assert np.allclose(T.initial_value, 0.5) assert np.allclose(T.integrator_function.initializer, 0.5) + assert np.allclose(T.value, 0.55) - T.integrator_function.reinitialize(0.5) + # Reset integrator_function ONLY + T.integrator_function.reinitialize(0.6) - assert np.allclose(T.previous_value, 0.5) - assert np.allclose(T.initial_value, 0.5) - assert np.allclose(T.integrator_function.initializer, 0.5) + assert np.allclose(T.previous_value, 0.6) # previous_value is a property that looks at integrator_function + assert np.allclose(T.initial_value, 0.5) # initial_value is on mechanism only, does not update with int_fun + assert np.allclose(T.integrator_function.initializer, 0.6) # initializer is on integrator_function + assert np.allclose(T.value, 0.55) # on mechanism only, so does not update until execution T.execute(1.0) - # integration: 0.9*0.5 + 0.1*1.0 + 0.0 = 0.55 ---> previous value = 0.55 - # linear fn: 0.55*1.0 = 0.55 - assert np.allclose(T.previous_value, 0.55) - assert np.allclose(T.initial_value, 0.5) - assert np.allclose(T.integrator_function.initializer, 0.5) + # integration: 0.9*0.6 + 0.1*1.0 + 0.0 = 0.64 ---> previous value = 0.55 + # linear fn: 0.64*1.0 = 0.64 + assert np.allclose(T.previous_value, 0.64) # property that looks at integrator_function + assert np.allclose(T.initial_value, 0.5) # initial_value is on mechanism only, and does not update with exec + assert np.allclose(T.integrator_function.initializer, 0.5) # initializer does not change with execution + assert np.allclose(T.value, 0.64) # on mechanism, but updates with execution + + T.reinitialize(0.4) + # linear fn: 0.4*1.0 = 0.4 + assert np.allclose(T.previous_value, 0.4) # property that looks at integrator, which updated with mech reset + assert np.allclose(T.initial_value, 0.4) # updates because mechanism was reset + assert np.allclose(T.integrator_function.initializer, 0.4) # on integrator fun, but updates when mech resets + assert np.allclose(T.value, 0.4) # on mechanism, but updates with mech reset - - def test_previous_reset_initializer_run(self): + T.execute(1.0) + # integration: 0.9*0.4 + 0.1*1.0 + 0.0 = 0.46 ---> previous value = 0.46 + # linear fn: 0.46*1.0 = 0.46 + assert np.allclose(T.previous_value, 0.46) # property that looks at integrator, which updated with mech exec + assert np.allclose(T.initial_value, 0.4) # on mech, does not update with exec + assert np.allclose(T.integrator_function.initializer, 0.4) # 
initializer does not change with execution
+        assert np.allclose(T.value, 0.46)  # on mechanism, but updates with exec
+
+    def test_previous_reinitialize_run(self):
         T = TransferMechanism(name="T",
                               initial_value=0.5,
                               integrator_mode=True,

From f4fe8984c517bd2ef860c6f46ad5aaf6c0cc46b4 Mon Sep 17 00:00:00 2001
From: KristenManning
Date: Fri, 9 Feb 2018 11:27:56 -0500
Subject: [PATCH 13/17] revising outdated TransferMechanism docs for
 reinitialize

---
 .../processing/transfermechanism.py           | 55 +++++++++++++++----
 1 file changed, 45 insertions(+), 10 deletions(-)

diff --git a/psyneulink/components/mechanisms/processing/transfermechanism.py b/psyneulink/components/mechanisms/processing/transfermechanism.py
index 2e6e7b735ea..18a7f962694 100644
--- a/psyneulink/components/mechanisms/processing/transfermechanism.py
+++ b/psyneulink/components/mechanisms/processing/transfermechanism.py
@@ -191,12 +191,38 @@
 ` of each of its `OutputStates `, and to the 1st item of the Mechanism's `output_values
 ` attribute.
 
-In some cases, it may be useful to reset the integration of the mechanism back to the original starting point, or a new
-one. This can be done using the `reinitialize ` property on the mechanism's
-`integrator_function `. The `reinitialize `
-property sets the `integrator_function's `
-`initializer `, `previous_value `, and
-`value ` attributes to a specified value.
+
+.. _Transfer_Reinitialization:
+
+Reinitialization
+~~~~~~~~~~~~~~~~
+
+In some cases, it may be useful to reset the accumulation of a mechanism back to its original starting point, or a new
+starting point. This is done using the `reinitialize ` method on the mechanism's
+`integrator_function `, or the mechanism's own `reinitialize
+` method.
+
+The `reinitialize ` method of the `integrator_function
+` sets:
+
+    - the integrator_function's `initializer ` attribute
+    - the integrator_function's `previous_value ` attribute
+    - the integrator_function's `value ` attribute
+
+    to the specified value.
+
+The `reinitialize ` method of the `TransferMechanism` first sets:
+
+    - the integrator_function's `initializer ` attribute
+    - the integrator_function's `previous_value ` attribute
+    - the integrator_function's `value ` attribute
+    - the TransferMechanism's `initial_value ` attribute
+
+    to the specified value. Then:
+
+    - the specified value is passed into the mechanism's `function ` and the function is executed
+    - the TransferMechanism's `value ` attribute is set to the output of the function
+    - the TransferMechanism updates its `output_states `
 
 A use case for `reinitialize ` is demonstrated in the following example:
 
@@ -227,11 +253,13 @@
 ...                  num_trials=5)   #doctest: +SKIP
 >>> assert np.allclose(my_time_averaged_transfer_mechanism.value, 0.72105725)  #doctest: +SKIP
 
-The integrator_function's `reinitialize ` property is useful in cases when the
-integrator should instead start over at its original initial value or a new one. Use `reinitialize
-` to re-start the integrator_function's accumulation at 0.2:
+The integrator_function's `reinitialize ` method and the TransferMechanism's
+`reinitialize ` method are useful in cases when the integration should instead start
+over at the original initial value, or a new one.
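As a compact sketch of the difference between the two methods (it mirrors the accompanying tests, and assumes a mechanism built with initial_value=0.5, integrator_mode=True and smoothing_factor=0.1)::

    T = TransferMechanism(initial_value=0.5, integrator_mode=True, smoothing_factor=0.1)
    T.execute(1.0)                           # integrates to 0.55

    T.integrator_function.reinitialize(0.6)  # resets only the integrator_function:
                                             # T.previous_value -> 0.6, but T.value stays 0.55 until the next execution
    T.reinitialize(0.4)                      # resets the integrator_function and re-computes the mechanism:
                                             # T.previous_value, T.initial_value and T.value all become 0.4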
+ +Use `reinitialize ` to re-start the integrator_function's accumulation at 0.2: - >>> my_time_averaged_transfer_mechanism.integrator_function.reinitialize = np.array([[0.2]]) #doctest: +SKIP + >>> my_time_averaged_transfer_mechanism.integrator_function.reinitialize(np.array([[0.2]])) #doctest: +SKIP Run the system again to observe that my_time_averaged_transfer_mechanism's integrator_function will begin accumulating at 0.2, following the exact same trajectory as in RUN 1: @@ -245,6 +273,13 @@ my_time_averaged_transfer_mechanism's integrator_function effectively started RUN 3 in the same state as it began RUN 1. As a result, it arrived at the exact same value after 5 trials (with identical inputs). +In the examples above, `reinitialize ` was applied directly to the integrator function. +The key difference between the `integrator_function's reinitialize ` and the +`TransferMechanism's reinitialize ` is that the latter will also execute the mechanism's +function and update its output states. This is useful if the mechanism's value or any of its output state values will +be used or checked *before* the mechanism's next execution. (This is often true if the mechanism is responsible for +modulating other components or if a `Scheduler` condition depends on the mechanism's activity.) + COMMENT: .. _Transfer_Examples: From d7c7b7468bcaeb237753b12c24a8d01e157b6159 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 9 Feb 2018 16:10:54 -0500 Subject: [PATCH 14/17] fixing bugs that prevented initialize method on system from ever executing, and adding pytests for it now that it works --- psyneulink/components/mechanisms/mechanism.py | 9 ++---- psyneulink/components/system.py | 6 +--- tests/system/test_system.py | 29 +++++++++++++++++++ 3 files changed, 32 insertions(+), 12 deletions(-) diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index 47a7a3031f1..912bc744d4e 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -2171,11 +2171,6 @@ def initialize(self, value): """Assign an initial value to the Mechanism's `value ` attribute and update its `OutputStates `. - COMMENT: - Takes a number or 1d array and assigns it to the first item of the Mechanism's - `value ` attribute. - COMMENT - Arguments --------- @@ -2187,8 +2182,8 @@ def initialize(self, value): if not iscompatible(value, self.value): raise MechanismError("Initialization value ({}) is not compatiable with value of {}". 
format(value, append_type_to_name(self))) - self.value[0] = value - self._update_output_states() + self.value = np.atleast_1d(value) + self._update_output_states(context="INITIAL_VALUE") def _execute(self, variable=None, diff --git a/psyneulink/components/system.py b/psyneulink/components/system.py index 70c56fc5856..c9465bc9f07 100644 --- a/psyneulink/components/system.py +++ b/psyneulink/components/system.py @@ -820,7 +820,6 @@ def __init__(self, # Required to defer assignment of self.controller by setter # until the rest of the System has been instantiated self.status = INITIALIZING - processes = processes or [] if not isinstance(processes, list): processes = [processes] @@ -2848,10 +2847,7 @@ def run(self, if self.scheduler_learning is None: self.scheduler_learning = Scheduler(graph=self.learningexecution_graph) - # initial_values = initial_values or self.initial_values - if initial_values is None and self.initial_values: - initial_values = self.initial_values - + self.initial_values = initial_values logger.debug(inputs) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 446117f4e72..db9a63d7759 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -2,6 +2,7 @@ from psyneulink.components.functions.function import BogaczEtAl, Linear, Logistic from psyneulink.components.mechanisms.processing.transfermechanism import TransferMechanism +from psyneulink.library.mechanisms.processing.transfer.recurrenttransfermechanism import RecurrentTransferMechanism from psyneulink.components.process import Process from psyneulink.components.projections.modulatory.controlprojection import ControlProjection from psyneulink.components.system import System @@ -642,3 +643,31 @@ def cyclic_extended_loop(self): assert d.systems[s] == TERMINAL assert e.systems[s] == ORIGIN assert f.systems[s] == INITIALIZE_CYCLE +class TestInitialize: + + def test_initialize_mechanisms(self): + A = TransferMechanism(name='A') + B = TransferMechanism(name='B') + C = RecurrentTransferMechanism(name='C') + + abc_process = Process(pathway=[A, B, C]) + + abc_system = System(processes=[abc_process]) + + C.log.set_log_conditions('value') + + abc_system.run(inputs={A: [1.0, 2.0, 3.0]}, + initial_values={A: 1.0, + B: 1.5, + C: 2.0}, + initialize=True) + + abc_system.run(inputs={A: [1.0, 2.0, 3.0]}, + initial_values={A: 1.0, + B: 1.5, + C: 2.0}, + initialize=False) + + # Run 1 --> Execution 1: 1 + 2 = 3 | Execution 2: 3 + 2 = 5 | Execution 3: 5 + 3 = 8 + # Run 2 --> Execution 1: 8 + 1 = 9 | Execution 2: 9 + 2 = 11 | Execution 3: 11 + 3 = 14 + assert np.allclose(C.log.nparray_dictionary('value')['value'], [[[3]], [[5]], [[8]], [[9]], [[11]], [[14]]]) From 85531c3c96f17cef1633c878894eb44bf42e1180 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 9 Feb 2018 17:24:31 -0500 Subject: [PATCH 15/17] continuing to add tests and docs for reinitialize --- .../processing/transfermechanism.py | 5 +- tests/mechanisms/test_kwta.py | 2 +- tests/mechanisms/test_lca.py | 65 +++++++++++++++++++ .../test_recurrent_transfer_mechanism.py | 58 +++++++++++++++++ tests/mechanisms/test_transfer_mechanism.py | 3 +- 5 files changed, 128 insertions(+), 5 deletions(-) diff --git a/psyneulink/components/mechanisms/processing/transfermechanism.py b/psyneulink/components/mechanisms/processing/transfermechanism.py index 18a7f962694..d50a6d38bcf 100644 --- a/psyneulink/components/mechanisms/processing/transfermechanism.py +++ b/psyneulink/components/mechanisms/processing/transfermechanism.py @@ -277,8 +277,9 @@ The 
key difference between the `integrator_function's reinitialize ` and the `TransferMechanism's reinitialize ` is that the latter will also execute the mechanism's function and update its output states. This is useful if the mechanism's value or any of its output state values will -be used or checked *before* the mechanism's next execution. (This is often true if the mechanism is responsible for -modulating other components or if a `Scheduler` condition depends on the mechanism's activity.) +be used or checked *before* the mechanism's next execution. (This may be true if, for example, the mechanism is +`recurrent `, the mechanism is responsible for `modulating previous value = 0.55 + # linear fn: 0.55*1.0 = 0.55 + # Trial 2 | variable = 1.0 + 0.55 + # integration: 0.9*0.55 + 0.1*1.55 + 0.0 = 0.65 ---> previous value = 0.65 + # linear fn: 0.65*1.0 = 0.65 + assert np.allclose(R.previous_value, 0.65) + assert np.allclose(R.initial_value, 0.5) + assert np.allclose(R.integrator_function.initializer, 0.5) + + R.integrator_function.reinitialize(0.9) + + assert np.allclose(R.previous_value, 0.9) + assert np.allclose(R.initial_value, 0.5) + assert np.allclose(R.integrator_function.initializer, 0.9) + assert np.allclose(R.value, 0.65) + + R.reinitialize(0.5) + + assert np.allclose(R.previous_value, 0.5) + assert np.allclose(R.initial_value, 0.5) + assert np.allclose(R.integrator_function.initializer, 0.5) + assert np.allclose(R.value, 0.5) + + S.run(inputs={R: 1.0}, num_trials=2) + # Trial 3 + # integration: 0.9*0.5 + 0.1*1.5 + 0.0 = 0.6 ---> previous value = 0.6 + # linear fn: 0.6*1.0 = 0.6 + # Trial 4 + # integration: 0.9*0.6 + 0.1*1.6 + 0.0 = 0.7 ---> previous value = 0.7 + # linear fn: 0.7*1.0 = 0.7 + assert np.allclose(R.previous_value, 0.7) + assert np.allclose(R.initial_value, 0.5) + assert np.allclose(R.integrator_function.initializer, 0.5) \ No newline at end of file diff --git a/tests/mechanisms/test_transfer_mechanism.py b/tests/mechanisms/test_transfer_mechanism.py index 33921c528c0..767881581bd 100644 --- a/tests/mechanisms/test_transfer_mechanism.py +++ b/tests/mechanisms/test_transfer_mechanism.py @@ -12,7 +12,6 @@ from psyneulink.components.process import Process from psyneulink.components.system import System - class TestTransferMechanismInputs: # VALID INPUTS @@ -913,7 +912,7 @@ def test_previous_value_reinitialize_execute(self): assert np.allclose(T.integrator_function.initializer, 0.4) # initializer does not change with execution assert np.allclose(T.value, 0.46) # on mechanism, but updates with exec - def test_previous_reinitialize_run(self): + def test_reinitialize_run(self): T = TransferMechanism(name="T", initial_value=0.5, integrator_mode=True, From 31f5ed78acfb91ed055b1c8d73a75f21ee33b238 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 9 Feb 2018 18:45:15 -0500 Subject: [PATCH 16/17] documenting reinitialize on mechanism --- psyneulink/components/functions/function.py | 206 ++++++++---------- psyneulink/components/mechanisms/mechanism.py | 39 ++++ 2 files changed, 128 insertions(+), 117 deletions(-) diff --git a/psyneulink/components/functions/function.py b/psyneulink/components/functions/function.py index f33b183f3df..90a8389a5e5 100644 --- a/psyneulink/components/functions/function.py +++ b/psyneulink/components/functions/function.py @@ -4182,13 +4182,20 @@ def _runge_kutta_4(self, previous_value, previous_time, slope, time_step_size): def reinitialize(self, new_previous_value=None, **kwargs): """ + Effectively begins accumulation over again at the specified value. 
+ Sets - `previous_value ` - `initializer ` - `value ` - to the quantity specified, which effectively begins accumulation over again at the specified value + to the quantity specified. + + For specific types of Integrator functions, additional values, such as initial time, must be specified, and + additional attributes are reset. + + If no arguments are specified, then the instance default for `initializer ` is used. """ if new_previous_value is None: new_previous_value = self.instance_defaults.initializer @@ -4291,15 +4298,6 @@ class SimpleIntegrator( previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. - reinitialize : float or np.array - Sets - - - `previous_value ` - - `initializer ` - - `value ` - - to the quantity specified, which effectively begins accumulation over again at the specified value - owner : Component `component ` to which the Function has been assigned. @@ -4507,15 +4505,6 @@ class LCAIntegrator( previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. - reinitialize : float or np.array - Sets - - - `previous_value ` - - `initializer ` - - `value ` - - to the quantity specified, which effectively begins accumulation over again at the specified value - owner : Component `component ` to which the Function has been assigned. @@ -4723,15 +4712,6 @@ class ConstantIntegrator(Integrator): # --------------------------------------- If initializer is a list or array, it must be the same length as `variable `. - reinitialize : float or np.array - Sets - - - `previous_value ` - - `initializer ` - - `value ` - - to the quantity specified, which effectively begins accumulation over again at the specified value - previous_value : 1d np.array : default ClassDefaults.variable stores previous value to which `rate ` and `noise ` will be added. @@ -4943,15 +4923,6 @@ class AdaptiveIntegrator( previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. - reinitialize : float or np.array - Sets - - - `previous_value ` - - `initializer ` - - `value ` - - to the quantity specified, which effectively begins accumulation over again at the specified value - owner : Component `component ` to which the Function has been assigned. @@ -5238,21 +5209,6 @@ class DriftDiffusionIntegrator( previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. - reinitialize : float or np.array - Takes 2 items (i.e my_integrator.reinitialize = 1.0, 2.0), each of which is a float or array - - Sets - - - `previous_value ` - - `initializer ` - - `value ` - - to the quantity specified in reinitialize[0]. - - Sets `previous_time ` to the quantity specified in reinitialize[1]. - - Effectively begins accumulation over again at the original starting point and time, or new ones - threshold : float : default 0.0 when used properly determines the threshold (boundaries) of the drift diffusion process (i.e., at which the integration process is assumed to terminate). @@ -5398,6 +5354,22 @@ def function(self, return adjusted_value def reinitialize(self, new_previous_value=None, new_previous_time=None): + """ + In effect, begins accumulation over again at the original starting point and time, or new ones. + + Sets + + - `previous_value ` + - `initializer ` + - `value ` + + to the value specified in the first argument. 
+ + Sets `previous_time ` to the value specified in the second argument. + + If no arguments are specified, then the instance defaults for `initializer + ` and `t0 ` are used. + """ if new_previous_value is None: new_previous_value = self.instance_defaults.initializer if new_previous_time is None: @@ -5505,20 +5477,6 @@ class OrnsteinUhlenbeckIntegrator( previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. - reinitialize : float or np.array - Sets - - - `previous_value ` - - `initializer ` - - `value ` - - to the quantity specified in reinitialize[0] - - Sets `previous_time ` to the quantity specified in reinitialize[1]. - - Effectively begins accumulation over again at the specified value and time - - previous_time : float stores previous time at which the function was executed and accumulates with each execution according to `time_step_size `. @@ -5653,6 +5611,22 @@ def function(self, return adjusted_value def reinitialize(self, new_previous_value=None, new_previous_time=None): + """ + In effect, begins accumulation over again at the original starting point and time, or new ones. + + Sets + + - `previous_value ` + - `initializer ` + - `value ` + + to the value specified in the first argument. + + Sets `previous_time ` to the value specified in the second argument. + + If no arguments are specified, then the instance defaults for `initializer + ` and `t0 ` are used. + """ if new_previous_value is None: new_previous_value = self.instance_defaults.initializer if new_previous_time is None: @@ -6030,27 +6004,6 @@ class FHNIntegrator(Integrator): # -------------------------------------------- time_constant_w : float : default 12.5 scaling factor on the dv/dt equation - reinitialize : float or np.array - Takes 3 items (i.e my_integrator.reinitialize = 1.0, 2.0, 3.0), each of which is a float or array - - Sets - - - `previous_v ` - - `initial_v ` - - to the quantity specified in reinitialize[0]. - - Sets - - - `previous_w ` - - `initial_w ` - - to the quantity specified in reinitialize[1]. - - Sets `previous_time ` to the quantity specified in reinitialize[2]. - - Effectively begins accumulation over again at the specified v, w, and time. - prefs : PreferenceSet or specification dict : default Function.classPreferences the `PreferenceSet` for the Function (see `prefs ` for details). """ @@ -6428,6 +6381,28 @@ def dw_dt(time, w, v, mode, a_w, b_w, c_w, uncorrelated_activity, time_constant_ return self.previous_v, self.previous_w, self.previous_time def reinitialize(self, new_previous_v=None, new_previous_w=None, new_previous_time=None): + """ + Effectively begins accumulation over again at the specified v, w, and time. + + Sets + + - `previous_v ` + - `initial_v ` + + to the quantity specified in the first argument. + + Sets + + - `previous_w ` + - `initial_w ` + + to the quantity specified in the second argument. + + Sets `previous_time ` to the quantity specified in the third argument. + + If no arguments are specified, then the instance defaults for `initial_v `, `initial_w + ` and `t_0 ` are used. + """ if new_previous_v is None: new_previous_v = self.instance_defaults.initial_v if new_previous_w is None: @@ -6551,15 +6526,6 @@ class AccumulatorIntegrator(Integrator): # ------------------------------------ stores previous value to which `rate ` and `noise ` will be added. 
- reinitialize : float or np.array - Sets - - - `previous_value ` - - `initializer ` - - `value ` - - to the quantity specified, which effectively begins accumulation over again at the specified value - owner : Component `component ` to which the Function has been assigned. @@ -6886,27 +6852,6 @@ class AGTUtilityIntegrator(Integrator): # ------------------------------------- stores previous value with which `variable ` is integrated using the EWMA filter and long term parameters - reinitialize : float or np.array - Takes 2 items (i.e my_integrator.reinitialize = 1.0, 2.0), each of which is a float or array - - Sets - - - `previous_short_term_utility ` - - `initial_short_term_utility ` - - to the quantity specified in reinitialize[0]. - - Sets - - - `previous_long_term_utility ` - - `initial_long_term_utility ` - - to the quantity specified in reinitialize[1]. - - sets `value ` to the to the quantity specified in reinitialize[2]. - - This effectively begins accumulation over again at the specified utilities. - owner : Component `component ` to which the Function has been assigned. @@ -7147,6 +7092,33 @@ def combine_utilities(self, short_term_utility, long_term_utility): return value + offset def reinitialize(self, short=None, long=None): + + """ + Effectively begins accumulation over again at the specified utilities. + + Sets + + - `previous_short_term_utility ` + - `initial_short_term_utility ` + + to the quantity specified in the first argument. + + Sets + + - `previous_long_term_utility ` + - `initial_long_term_utility ` + + to the quantity specified in the second argument. + + sets `value ` by computing it based on the newly updated values for + `previous_short_term_utility ` and + `previous_long_term_utility `. + + If no arguments are specified, then the instance defaults for `initial_short_term_utility + ` and `initial_long_term_utility + ` are used. + """ + if short is None: short = self.instance_defaults.initial_short_term_utility if long is None: diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index 912bc744d4e..f6564ffe71e 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -1794,6 +1794,45 @@ def _add_projection_from_mechanism(self, receiver, state, projection, context=No _add_projection_from(sender=self, state=state, projection_spec=projection, receiver=receiver, context=context) def reinitialize(self, *args): + """ + If the mechanism's `function ` is an `Integrator`, or if the mechanism has and + `integrator_function ` (see `TransferMechanism`), this method + effectively begins the function's accumulation over again at the specified value, and updates related + attributes on the mechanism. + + If the mechanism's `function ` is an `Integrator`: + + Reinitializing first calls the function's own `reinitialize ` method, which + typically sets: + + - `previous_value ` + - `initializer ` + - `value ` + + to the quantity specified. For specific types of Integrator functions, additional values, such as + initial time, must be specified, and additional attributes are reset. See individual functions for + details. + + Then, the mechanism sets its `value `. + + If the mechanism has an `integrator_function `: + + Reinitializing first calls the `integrator_function's ` own + `reinitialize ` method, which typically sets: + + - `previous_value ` + - `initializer ` + - `value ` + + to the quantity specified. 
For specific types of Integrator functions, additional values, such as + initial time, must be specified, and additional attributes are reset. See individual functions for + details. + + Then, the mechanism executes its `function ` using the quantity specified as the + function's variable. The mechanism's `value `. + """ from psyneulink.components.functions.function import Integrator # If the primary function of the mechanism is an integrator: From 6c1b1532ddf30d6f10c3feee7e2fcbc345546713 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 9 Feb 2018 18:49:02 -0500 Subject: [PATCH 17/17] documentation typos --- psyneulink/components/mechanisms/mechanism.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/psyneulink/components/mechanisms/mechanism.py b/psyneulink/components/mechanisms/mechanism.py index f6564ffe71e..6b87e0148a7 100644 --- a/psyneulink/components/mechanisms/mechanism.py +++ b/psyneulink/components/mechanisms/mechanism.py @@ -1800,9 +1800,9 @@ def reinitialize(self, *args): effectively begins the function's accumulation over again at the specified value, and updates related attributes on the mechanism. - If the mechanism's `function ` is an `Integrator`: + If the mechanism's `function ` is an `Integrator`: - Reinitializing first calls the function's own `reinitialize ` method, which + `reinitialize ` first calls the function's own `reinitialize ` method, which typically sets: - `previous_value ` @@ -1813,12 +1813,12 @@ def reinitialize(self, *args): initial time, must be specified, and additional attributes are reset. See individual functions for details. - Then, the mechanism sets its `value ` to the quantity specified, and updates its `output states `. If the mechanism has an `integrator_function `: - Reinitializing first calls the `integrator_function's ` own + `reinitialize ` first calls the `integrator_function's ` own `reinitialize ` method, which typically sets: - `previous_value ` @@ -1829,8 +1829,8 @@ def reinitialize(self, *args): initial time, must be specified, and additional attributes are reset. See individual functions for details. - Then, the mechanism executes its `function ` using the quantity specified as the - function's variable. The mechanism's `value ` using the quantity specified as the + function's variable. The mechanism's `value ` is set to the output of its function. Finally, the mechanism updates its `output states `. """ from psyneulink.components.functions.function import Integrator