From 7ebd31273171682c30d026cc983a157eddae0670 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 30 Aug 2017 14:06:12 -0400 Subject: [PATCH 01/69] cleaning up docs - verifying that TransferMechanism docs match the code --- .../ProcessingMechanisms/TransferMechanism.py | 52 +++++++++---------- 1 file changed, 24 insertions(+), 28 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py index 66685b43008..ad657ea63fe 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py @@ -83,14 +83,17 @@ * `noise `: applied element-wise to the input before transforming it. .. - * `time_constant `: if the `time_scale ` attribute - is `TimeScale.TIME_STEP`, the input is exponentially time-averaged before transforming it, using the value - of the `time_constant ` attribute as the rate of integration (a higher value - specifies a faster rate); if `time_scale ` is `TimeScale.TRIAL`, - `time_constant ` is ignored. - .. * `range `: caps all elements of the `function ` result by the lower and upper values specified by range. + .. + * `integrator_mode `: when `integrator_mode ` + is set to True, a TransferMechanism exponentially time-averages its input before transforming it. + .. + * `time_constant `: if the `integrator_mode ` + attribute is set to True, the `time_constant ` attribute is the rate of + integration (a higher value specifies a faster rate); if `integrator_mode ` is + False, `time_constant ` is ignored and time-averaging does not occur. + .. _Transfer_OutputState: @@ -194,6 +197,7 @@ class TransferMechanism(ProcessingMechanism_Base): initial_value=None, \ noise=0.0, \ time_constant=1.0, \ + integrator_mode=False, \ range=(float:min, float:max),\ time_scale=TimeScale.TRIAL, \ params=None, \ @@ -246,8 +250,8 @@ class TransferMechanism(ProcessingMechanism_Base): or a custom function. initial_value : value, list or np.ndarray : default Transfer_DEFAULT_BIAS - specifies the starting value for time-averaged input (only relevant if - `time_constant ` is not 1.0). + specifies the starting value for time-averaged input (only relevant if `integrator_mode + ` is True and `time_constant ` is not 1.0). :py:data:`Transfer_DEFAULT_BIAS SHOULD RESOLVE TO VALUE>` noise : float or function : default 0.0 @@ -256,11 +260,10 @@ class TransferMechanism(ProcessingMechanism_Base): if it is a function, it must return a scalar value. time_constant : float : default 1.0 - the time constant for exponential time averaging of input when the Mechanism is executed with `time_scale` - set to `TimeScale.TIME_STEP`:: + the time constant for exponential time averaging of input when the Mechanism is executed with `integrator_mode` + set to True:: - result = (time_constant * current input) + - (1-time_constant * result on previous time_step) + result = (time_constant * current input) + ((1-time_constant) * result on previous time_step) range : Optional[Tuple[float, float]] specifies the allowable range for the result of `function `: @@ -273,11 +276,6 @@ class TransferMechanism(ProcessingMechanism_Base): the Mechanism, its function, and/or a custom function and its parameters. Values specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. 
- time_scale : TimeScale : TimeScale.TRIAL - specifies whether the Mechanism is executed using the `TIME_STEP` or `TRIAL` `TimeScale`. - This must be set to `TimeScale.TIME_STEP` for the `time_constant ` - parameter to have an effect. - name : str : default TransferMechanism- a string used for the name of the Mechanism. If not is specified, a default is assigned by `MechanismRegistry` @@ -312,8 +310,8 @@ class TransferMechanism(ProcessingMechanism_Base): THE FOLLOWING IS THE CURRENT ASSIGNMENT COMMENT initial_value : value, list or np.ndarray : Transfer_DEFAULT_BIAS - determines the starting value for time-averaged input - (only relevant if `time_constant ` parameter is not 1.0). + specifies the starting value for time-averaged input (only relevant if `integrator_mode + ` is True and `time_constant ` is not 1.0). :py:data:`Transfer_DEFAULT_BIAS SHOULD RESOLVE TO VALUE>` noise : float or function : default 0.0 @@ -322,10 +320,14 @@ class TransferMechanism(ProcessingMechanism_Base): if it is a function, it must return a scalar value. time_constant : float : default 1.0 - the time constant for exponential time averaging of input - when the Mechanism is executed using the `TIME_STEP` `TimeScale`:: + the time constant for exponential time averaging of input when the Mechanism is executed with `integrator_mode` + set to True:: + + result = (time_constant * current input) + ( (1-time_constant) * result on previous time_step) - result = (time_constant * current input) + (1-time_constant * result on previous time_step) + integrator_mode : boolean : default False + when set to True, the Mechanism time averages its input according to an exponentially weighted moving average + (see `time_constant `). range : Optional[Tuple[float, float]] determines the allowable range of the result: the first value specifies the minimum allowable value @@ -333,9 +335,6 @@ class TransferMechanism(ProcessingMechanism_Base): is set to the value of `range ` it exceeds. If `function ` is `Logistic`, `range ` is set by default to (0,1). - previous_input : float - the value of the `variable ` on the previous execution of the Mechanism. - value : 2d np.array [array(float64)] result of executing `function `. @@ -361,9 +360,6 @@ class TransferMechanism(ProcessingMechanism_Base): **output_states** argument of the Mechanism's constructor (see `TransferMechanism Standard OutputStates `). - time_scale : TimeScale : default TimeScale.TRIAL - specifies whether the Mechanism is executed using the `TIME_STEP` or `TRIAL` `TimeScale`. - name : str : default TransferMechanism- the name of the Mechanism. Specified in the **name** argument of the constructor for the Projection; From 436bd97a45ae098963735b7eccb1b98b87e3ed40 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 30 Aug 2017 15:44:46 -0400 Subject: [PATCH 02/69] cleaning up docs - Integrator (base), Adaptive, Simple, Constant --- PsyNeuLink/Components/Functions/Function.py | 227 +++----------------- 1 file changed, 35 insertions(+), 192 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 9f750e34f8a..3549203d3da 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -25,7 +25,6 @@ * `LinearMatrix` Integrator Functions: - * `Integrator` * `SimpleIntegrator` * `ConstantIntegrator` * `AdaptiveIntegrator` @@ -3021,109 +3020,11 @@ class IntegratorFunction(Function_Base): # • can noise and initializer be an array? If so, validated in validate_param? 
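# ----------------------------------------------------------------------------------------------------------------
# A minimal, standalone sketch of the update rules documented for the Integrator Functions below (and for
# TransferMechanism's `integrator_mode` above), written in plain numpy. It is illustrative only -- the helper
# names and default values here are assumptions, not part of the PsyNeuLink API -- but each formula restates the
# corresponding docstring.

import numpy as np

def constant_step(previous_value, rate=1.0, noise=0.0):
    # ConstantIntegrator:  previous_value + rate + noise  (the variable/input is ignored)
    return previous_value + rate + noise

def adaptive_step(previous_value, variable, rate=0.5, noise=0.0):
    # AdaptiveIntegrator (exponentially weighted moving average); with noise=0 this is the same form a
    # TransferMechanism uses when integrator_mode=True, with time_constant playing the role of rate:
    #   (1 - rate) * previous_value + rate * variable + noise
    return (1 - rate) * previous_value + rate * variable + noise

def drift_diffusion_step(previous_value, previous_time, stimulus, rate=1.0, noise=0.5, time_step_size=0.2):
    # DriftDiffusionIntegrator: one Euler step of evidence accumulation; previous_time advances by
    # time_step_size on every (non-initialization) execution, which is what the DDM's RESPONSE_TIME
    # reports in the later patches of this series.
    new_value = previous_value + rate * stimulus * time_step_size \
                + np.sqrt(time_step_size * noise) * np.random.normal()
    return new_value, previous_time + time_step_size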
class Integrator(IntegratorFunction): # -------------------------------------------------------------------------------- - """ - Integrator( \ - default_variable=None, \ - rate=1.0, \ - - noise=0.0, \ - time_step_size=1.0, \ - initializer, \ - params=None, \ - owner=None, \ - prefs=None, \ - ) - - .. _Integrator: - - Integrate current value of `variable ` with its prior value. - - Arguments - --------- - - default_variable : number, list or np.array : default ClassDefaults.variable - specifies a template for the value to be integrated; if it is a list or array, each element is independently - integrated. - - rate : float, list or 1d np.array : default 1.0 - specifies the rate of integration. If it is a list or array, it must be the same length as - `variable ` (see `rate ` for details). - - noise : float, PsyNeuLink Function, list or 1d np.array : default 0.0 - specifies random value to be added in each call to `function `. (see - `noise ` for details). - - time_step_size : float : default 0.0 - determines the timing precision of the integration process when `integration_type ` - is set to DIFFUSION (see `time_step_size ` for details. - - initializer float, list or 1d np.array : default 0.0 - specifies starting value for integration. If it is a list or array, it must be the same length as - `default_variable ` (see `initializer ` for details). + """Function that accumulates over many executions by storing its value from the most recent execution and using this + to compute its new value - params : Optional[Dict[param keyword, param value]] - a `parameter dictionary ` that specifies the parameters for the - function. Values specified for parameters in the dictionary override any assigned to those parameters in - arguments of the constructor. - - owner : Component - `component ` to which to assign the Function. - - prefs : Optional[PreferenceSet or specification dict : Function.classPreferences] - the `PreferenceSet` for the Function. If it is not specified, a default is assigned using `classPreferences` - defined in __init__.py (see :doc:`PreferenceSet ` for details). - - - Attributes - ---------- - - variable : number or np.array - current input value some portion of which (determined by `rate `) that will be - added to the prior value; if it is an array, each element is independently integrated. - - integration_type : [**NEEDS TO BE SPECIFIED**] : default [**NEEDS TO BE SPECIFIED**] - [**NEEDS TO BE SPECIFIED**] - - rate : float or 1d np.array - determines the rate of integration based on current and prior values. If integration_type is set to ADAPTIVE, - all elements must be between 0 and 1 (0 = no change; 1 = instantaneous change). If it has a single element, it - applies to all elements of `variable `; if it has more than one element, each element - applies to the corresponding element of `variable `. - - noise : float, function, list, or 1d np.array - specifies random value to be added in each call to `function `. - - If noise is a list or array, it must be the same length as `variable `. If noise is - specified as a single float or function, while `variable ` is a list or array, - noise will be applied to each variable element. In the case of a noise function, this means that the function - will be executed separately for each variable element. - - Note that in the case of DIFFUSION, noise must be specified as a float (or list or array of floats) because this - value will be used to construct the standard DDM probability distribution. 
For all other types of integration, - in order to generate random noise, we recommend that you instead select a probability distribution function - (see `Distribution Functions ` for details), which will generate a new noise value from - its distribution on each execution. If noise is specified as a float or as a function with a fixed output (or a - list or array of these), then the noise will simply be an offset that remains the same across all executions. - - initializer : 1d np.array or list - determines the starting value for integration (i.e., the value to which - `previous_value ` is set. - - If initializer is a list or array, it must be the same length as `variable `. If - initializer is specified as a single float or function, while `variable ` is a list or - array, initializer will be applied to each variable element. In the case of an initializer function, this means - that the function will be executed separately for each variable element. - - previous_value : 1d np.array : default ClassDefaults.variable - stores previous value with which `variable ` is integrated. - - owner : Mechanism - `component ` to which the Function has been assigned. - - prefs : PreferenceSet or specification dict : Projection.classPreferences - the `PreferenceSet` for function. Specified in the **prefs** argument of the constructor for the function; - if it is not specified, a default is assigned using `classPreferences` defined in __init__.py - (see :doc:`PreferenceSet ` for details). + All TransferFunctions must have the attribute `previous_value`, which specifies the value of the function on the + previous execution, and the attribute `initializer`, which sets `previous_value` on the first execution """ @@ -3270,45 +3171,6 @@ def _validate_noise(self, noise, var): "Noise parameter ({}) for {} must be a float, function, or array/list of these." .format(noise, self.name)) - # def _validate_initializer(self, initializer, var): - # # Initializer is a list or array - # if isinstance(initializer, (np.ndarray, list)): - # # Variable is a list/array - # if isinstance(var, (np.ndarray, list)): - # if len(initializer) != np.array(var).size: - # # Formatting initializer for proper display in error message - # try: - # formatted_initializer = list(map(lambda x: x.__qualname__, initializer)) - # except AttributeError: - # formatted_initializer = initializer - # raise FunctionError( - # "The length ({}) of the array specified for the initializer parameter ({}) of {} " - # "must match the length ({}) of the default input ({}). If initializer is specified as" - # " an array or list, it must be of the same size as the input." - # .format(len(initializer), formatted_initializer, self.name, np.array(var).size, - # var)) - # else: - # for initializer_item in initializer: - # if not isinstance(initializer_item, (float, int)) and not callable(initializer_item): - # raise FunctionError( - # "The elements of a initializer list or array must be floats or functions.") - # - # - # # Variable is not a list/array - # else: - # raise FunctionError("The initializer parameter ({}) for {} may only be a list or array if the " - # "default input value is also a list or array.".format(initializer, self.name)) - # - # # # Elements of list/array have different types - # # if not all(isinstance(x, type(initializer[0])) for x in initializer): - # # raise FunctionError("All elements of initializer list/array ({}) for {} must be of the same type. 
" - # # .format(initializer, self.name)) - # - # elif not isinstance(initializer, (float, int)) and not callable(initializer): - # raise FunctionError( - # "Initializer parameter ({}) for {} must be a float, function, or array/list of these." - # .format(initializer, self.name)) - def _try_execute_param(self, param, var): # param is a list; if any element is callable, execute it @@ -3422,11 +3284,12 @@ class SimpleIntegrator( noise will be applied to each variable element. In the case of a noise function, this means that the function will be executed separately for each variable element. - **Note:** - In order to generate random noise, we recommend selecting a probability distribution function - (see `Distribution Functions ` for details), which will generate a new noise value from - its distribution on each execution. If noise is specified as a float or as a function with a fixed output, then - the noise will simply be an offset that remains the same across all executions. + + .. note:: + In order to generate random noise, we recommend selecting a probability distribution function + (see `Distribution Functions ` for details), which will generate a new noise value from + its distribution on each execution. If noise is specified as a float or as a function with a fixed output, then + the noise will simply be an offset that remains the same across all executions. initializer : float, 1d np.array or list determines the starting value for integration (i.e., the value to which @@ -3434,14 +3297,6 @@ class SimpleIntegrator( If initializer is a list or array, it must be the same length as `variable `. - TBI: - - Initializer may be a function or list/array of functions. - - If initializer is specified as a single float or function, while `variable ` is - a list or array, initializer will be applied to each variable element. In the case of an initializer function, - this means that the function will be executed separately for each variable element. - previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. @@ -3519,9 +3374,6 @@ def function(self, function. Values specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. - time_scale : TimeScale : default TimeScale.TRIAL - specifies whether the function is executed on the time_step or trial time scale. - Returns ------- @@ -3583,10 +3435,11 @@ class ConstantIntegrator( .. _ConstantIntegrator: - Integrates prior value by adding `rate ` and `noise `. Ignores + Integrates prior value by adding `rate ` and `noise `. (Ignores `variable `). - `previous_value ` + `rate ` + `noise ` + `previous_value ` + `rate ` + + `noise ` Arguments --------- @@ -3645,11 +3498,11 @@ class ConstantIntegrator( noise will be applied to each variable element. In the case of a noise function, this means that the function will be executed separately for each variable element. - **Note:** - In order to generate random noise, we recommend selecting a probability distribution function - (see `Distribution Functions ` for details), which will generate a new noise value from - its distribution on each execution. If noise is specified as a float or as a function with a fixed output, then - the noise will simply be an offset that remains the same across all executions. + .. 
note:: + In order to generate random noise, we recommend selecting a probability distribution function + (see `Distribution Functions ` for details), which will generate a new noise value from + its distribution on each execution. If noise is specified as a float or as a function with a fixed output, then + the noise will simply be an offset that remains the same across all executions. initializer : float, 1d np.array or list determines the starting value for integration (i.e., the value to which @@ -3657,14 +3510,6 @@ class ConstantIntegrator( If initializer is a list or array, it must be the same length as `variable `. - TBI: - - Initializer may be a function or list/array of functions. - - If initializer is specified as a single float or function, while `variable ` is - a list or array, initializer will be applied to each variable element. In the case of an initializer function, - this means that the function will be executed separately for each variable element. - previous_value : 1d np.array : default ClassDefaults.variable stores previous value to which `rate ` and `noise ` will be added. @@ -3738,7 +3583,7 @@ def function(self, time_scale=TimeScale.TRIAL, context=None): """ - Return: `previous_value ` combined with `rate ` and + Return: the sum of `previous_value `, `rate `, and `noise `. Arguments @@ -3805,7 +3650,11 @@ class AdaptiveIntegrator( .. _AdaptiveIntegrator: - Integrate current value of `variable ` with its prior value. + Computes an exponentially weighted moving average. + + (1 - `rate `) * `previous_value ` + `rate ` * + `variable ` + `noise ` + Arguments --------- @@ -3815,7 +3664,7 @@ class AdaptiveIntegrator( integrated. rate : float, list or 1d np.array : default 1.0 - specifies the rate of integration. If it is a list or array, it must be the same length as + specifies the smoothing factor of the EWMA. If it is a list or array, it must be the same length as `variable ` (see `rate ` for details). noise : float, PsyNeuLink Function, list or 1d np.array : default 0.0 @@ -3843,16 +3692,18 @@ class AdaptiveIntegrator( ---------- variable : number or np.array - current input value some portion of which (determined by `rate `) that will be + current input value some portion of which (determined by `rate `) will be added to the prior value; if it is an array, each element is independently integrated. rate : float or 1d np.array - determines the rate of integration based on current and prior values. All rate elements must be between 0 and 1 - (0 = no change; 1 = instantaneous change). + determines the smoothing factor of the EWMA. All rate elements must be between 0 and 1 (rate = 0 --> no change, + `variable ` is ignored; rate = 1 --> + `previous_value ` is ignored). - If rate is a float, it is applied to all elements of `variable `; - if it has more than one element, each element is applied to the corresponding element of - `variable `. + If rate is a float, it is applied to all elements of `variable ` (and + `previous_value `); if it has more than one element, each element is applied + to the corresponding element of `variable ` (and + `previous_value `). noise : float, function, list, or 1d np.array specifies random value to be added in each call to `function `. @@ -3870,19 +3721,11 @@ class AdaptiveIntegrator( the noise will simply be an offset that remains the same across all executions. initializer : float, 1d np.array or list - determines the starting value for integration (i.e., the value to which - `previous_value ` is set. 
+ determines the starting value for time-averaging (i.e., the value to which + `previous_value ` is originally set). If initializer is a list or array, it must be the same length as `variable `. - TBI: - - Initializer may be a function or list/array of functions. - - If initializer is specified as a single float or function, while `variable ` is - a list or array, initializer will be applied to each variable element. In the case of an initializer function, - this means that the function will be executed separately for each variable element. - previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. From 0f0c7c426b2111c7e6a51e52aba51abfc7d16cc6 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 30 Aug 2017 16:55:16 -0400 Subject: [PATCH 03/69] cleaning up docs for DriftDiffusionIntegrator & adding 'previous_time' and 't0' to the function for tracking response time; added pytest for the new attribs --- PsyNeuLink/Components/Functions/Function.py | 31 +++++++++++---------- tests/mechanisms/test_ddm_mechanism.py | 25 +++++++++++++++-- 2 files changed, 40 insertions(+), 16 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 3549203d3da..24641b743f3 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -3927,7 +3927,7 @@ class DriftDiffusionIntegrator( .. _DriftDiffusionIntegrator: - Integrate current value of `variable ` with its prior value. + Accumulate evidence overtime based on a stimulus and previous position. Arguments --------- @@ -3948,6 +3948,10 @@ class DriftDiffusionIntegrator( determines the timing precision of the integration process (see `time_step_size ` for details. + t0 : float + determines the start time of the integration process and is used to compute the RESPONSE_TIME output state of + the DDM Mechanism. + initializer float, list or 1d np.array : default 0.0 specifies starting value for integration. If it is a list or array, it must be the same length as `default_variable ` (see `initializer ` for details). @@ -3988,19 +3992,18 @@ class DriftDiffusionIntegrator( determines the timing precision of the integration process and is used to scale the `noise ` parameter appropriately. + t0 : float + determines the start time of the integration process and is used to compute the RESPONSE_TIME output state of + the DDM Mechanism. + initializer : float, 1d np.array or list determines the starting value for integration (i.e., the value to which `previous_value ` is set. If initializer is a list or array, it must be the same length as `variable `. - TBI: - - Initializer may be a function or list/array of functions. - - If initializer is specified as a single float or function, while `variable ` is - a list or array, initializer will be applied to each variable element. In the case of an initializer function, - this means that the function will be executed separately for each variable element. + previous_time : float + stores previous time at which the function was executed and accumulates according to time_step_size previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. 
@@ -4037,6 +4040,7 @@ def __init__(self, noise=0.0, offset: parameter_spec = 0.0, time_step_size=1.0, + t0=0.0, initializer=ClassDefaults.variable, params: tc.optional(dict) = None, owner=None, @@ -4046,6 +4050,7 @@ def __init__(self, # Assign args to params and functionParams dicts (kwConstants must == arg names) params = self._assign_args_to_param_dicts(rate=rate, time_step_size=time_step_size, + t0=t0, initializer=initializer, noise=noise, offset=offset, @@ -4062,7 +4067,7 @@ def __init__(self, # Reassign to kWInitializer in case default value was overridden self.previous_value = self.initializer - + self.previous_time = self.t0 self.auto_dependent = True def _validate_noise(self, noise, var): @@ -4077,8 +4082,8 @@ def function(self, time_scale=TimeScale.TRIAL, context=None): """ - Return: some fraction of `variable ` combined with some fraction of - `previous_value `. + Return: previous_value + rate * new_value * time_step_size + ( + time_step_size * noise) * np.random.normal() Arguments --------- @@ -4091,9 +4096,6 @@ def function(self, function. Values specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. - time_scale : TimeScale : default TimeScale.TRIAL - specifies whether the function is executed on the time_step or trial time scale. - Returns ------- @@ -4126,6 +4128,7 @@ def function(self, # (don't want to count it as an execution step) if not context or not INITIALIZING in context: self.previous_value = adjusted_value + self.previous_time += time_step_size return adjusted_value diff --git a/tests/mechanisms/test_ddm_mechanism.py b/tests/mechanisms/test_ddm_mechanism.py index 14472803fd8..6f973753ca5 100644 --- a/tests/mechanisms/test_ddm_mechanism.py +++ b/tests/mechanisms/test_ddm_mechanism.py @@ -1,6 +1,6 @@ import pytest import typecheck - +import numpy as np from PsyNeuLink.Components.Component import ComponentError from PsyNeuLink.Components.Functions.Function import BogaczEtAl, DriftDiffusionIntegrator, FunctionError, NormalDist from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.DDM import DDM, DDMError @@ -271,7 +271,6 @@ def test_DDM_input_list_len_2(): def test_DDM_input_fn(): with pytest.raises(TypeError) as error_text: stim = NormalDist().function - print(stim) T = DDM( name='DDM', function=DriftDiffusionIntegrator( @@ -525,3 +524,25 @@ def test_DDM_size_too_long(): time_scale=TimeScale.TIME_STEP ) assert "is greater than 1, implying there are" in str(error_text.value) + + +def test_DDM_size_too_long(): + + D = DDM( + name='DDM', + function=DriftDiffusionIntegrator( + noise=0.0, + rate=-5.0, + time_step_size=0.2, + t0=0.5 + ) + ) + # t0 = 0.5 + np.testing.assert_allclose(D.function_object.previous_time, 0.5, atol=1e-08) + D.execute(10) # t_1 = 0.5 + 0.2 = 0.7 + np.testing.assert_allclose(D.function_object.previous_time, 0.7, atol=1e-08) + D.execute(10) # t_2 = 0.7 + 0.2 = 0.9 + D.execute(10) # t_3 = 0.9 + 0.2 = 1.1 + D.execute(10) # t_4 = 1.1 + 0.2 = 1.3 + D.execute(10) # t_4 = 1.3 + 0.2 = 1.5 + np.testing.assert_allclose(D.function_object.previous_time, 1.5, atol=1e-08) From 65eae5cd7d562eaac78669136cdabc7572080ddf Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 30 Aug 2017 17:08:18 -0400 Subject: [PATCH 04/69] DDM's RESPONSE_TIME output state is now working with DriftDiffusionIntegrator bc the DDM looks at its functions previous_time attrib; + pytest for this --- .../Mechanisms/ProcessingMechanisms/DDM.py | 2 +- tests/mechanisms/test_ddm_mechanism.py | 23 +++++++++++-------- 2 files 
changed, 14 insertions(+), 11 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py index 4af84145b28..967d285da13 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py @@ -901,7 +901,7 @@ def _execute(self, logger.info('{0} {1} has reached threshold {2}'.format(type(self).__name__, self.name, self.threshold)) self.is_finished = True - return np.array([result, [0.0], [0.0], [0.0]]) + return np.array([result, self.function_object.previous_time]) # EXECUTE ANALYTIC SOLUTION (TRIAL TIME SCALE) ----------------------------------------------------------- diff --git a/tests/mechanisms/test_ddm_mechanism.py b/tests/mechanisms/test_ddm_mechanism.py index 6f973753ca5..e36ab7b5d02 100644 --- a/tests/mechanisms/test_ddm_mechanism.py +++ b/tests/mechanisms/test_ddm_mechanism.py @@ -526,7 +526,7 @@ def test_DDM_size_too_long(): assert "is greater than 1, implying there are" in str(error_text.value) -def test_DDM_size_too_long(): +def test_DDM_time(): D = DDM( name='DDM', @@ -537,12 +537,15 @@ def test_DDM_size_too_long(): t0=0.5 ) ) - # t0 = 0.5 - np.testing.assert_allclose(D.function_object.previous_time, 0.5, atol=1e-08) - D.execute(10) # t_1 = 0.5 + 0.2 = 0.7 - np.testing.assert_allclose(D.function_object.previous_time, 0.7, atol=1e-08) - D.execute(10) # t_2 = 0.7 + 0.2 = 0.9 - D.execute(10) # t_3 = 0.9 + 0.2 = 1.1 - D.execute(10) # t_4 = 1.1 + 0.2 = 1.3 - D.execute(10) # t_4 = 1.3 + 0.2 = 1.5 - np.testing.assert_allclose(D.function_object.previous_time, 1.5, atol=1e-08) + time_0 = D.function_object.previous_time # t0 = 0.5 + np.testing.assert_allclose(time_0, [0.5], atol=1e-08) + + time_1 = D.execute(10)[1] # t_1 = 0.5 + 0.2 = 0.7 + print(time_1) + # np.testing.assert_allclose(time_1, [0.7], atol=1e-08) + + # t_11 = + for i in range(10): # t11 = 0.7 + 10*0.2 = 2.7 + D.execute(10) + time_12 = D.execute(10)[1] # t_12 = 2.7 + 0.2 = 2.9 + np.testing.assert_allclose(time_12, [2.9], atol=1e-08) From 96ce6b3bce6aef37ff6c3168add063b5c2faa310 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 30 Aug 2017 17:19:47 -0400 Subject: [PATCH 05/69] fixing small numpy bugs in DDM pytests --- tests/mechanisms/test_ddm_mechanism.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/tests/mechanisms/test_ddm_mechanism.py b/tests/mechanisms/test_ddm_mechanism.py index e36ab7b5d02..23af64c8d42 100644 --- a/tests/mechanisms/test_ddm_mechanism.py +++ b/tests/mechanisms/test_ddm_mechanism.py @@ -443,7 +443,7 @@ def test_DDM_size_int_inputs_(): time_scale=TimeScale.TIME_STEP ) val = T.execute([.4]).tolist() - assert val == [[-2.0], [0.0], [0.0], [0.0]] + assert val == [[-2.0], [1.0]] # ------------------------------------------------------------------------------------------------ @@ -537,15 +537,13 @@ def test_DDM_time(): t0=0.5 ) ) - time_0 = D.function_object.previous_time # t0 = 0.5 + time_0 = D.function_object.previous_time # t_0 = 0.5 np.testing.assert_allclose(time_0, [0.5], atol=1e-08) - time_1 = D.execute(10)[1] # t_1 = 0.5 + 0.2 = 0.7 - print(time_1) - # np.testing.assert_allclose(time_1, [0.7], atol=1e-08) + time_1 = D.execute(10)[1][0] # t_1 = 0.5 + 0.2 = 0.7 + np.testing.assert_allclose(time_1, [0.7], atol=1e-08) - # t_11 = - for i in range(10): # t11 = 0.7 + 10*0.2 = 2.7 + for i in range(10): # t_11 = 0.7 + 10*0.2 = 2.7 D.execute(10) - time_12 = D.execute(10)[1] # t_12 = 2.7 + 0.2 = 2.9 - 
np.testing.assert_allclose(time_12, [2.9], atol=1e-08) + time_12 = D.execute(10)[1][0] # t_12 = 2.7 + 0.2 = 2.9 + np.testing.assert_allclose(time_12, [2.9], atol=1e-08) \ No newline at end of file From f200a2148dbd69d327f4f1a7d458263c64aa12fd Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 30 Aug 2017 17:34:57 -0400 Subject: [PATCH 06/69] finishing cleaning up documentation on DriftDiffusionIntegrator --- PsyNeuLink/Components/Functions/Function.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 24641b743f3..05ab87c290d 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4082,14 +4082,14 @@ def function(self, time_scale=TimeScale.TRIAL, context=None): """ - Return: previous_value + rate * new_value * time_step_size + ( - time_step_size * noise) * np.random.normal() + Return: previous_value + rate * variable * time_step_size + :math:`\\sqrt{time_step_size * noise}` * random + sample from Normal distribution Arguments --------- variable : number, list or np.array : default ClassDefaults.variable - a single value or array of values to be integrated. + the stimulus component of drift rate in the Drift Diffusion Model. params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that specifies the parameters for the From e92a9d64d82a185d3632ce429355d9bf236462a8 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 30 Aug 2017 17:44:23 -0400 Subject: [PATCH 07/69] adding previous_time and t0 attribs to OrnsteinUhlenbeckIntegrator as well + pytest --- PsyNeuLink/Components/Functions/Function.py | 6 ++++- tests/mechanisms/test_integrator_mechanism.py | 23 +++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 05ab87c290d..481f23bb178 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4089,7 +4089,7 @@ def function(self, --------- variable : number, list or np.array : default ClassDefaults.variable - the stimulus component of drift rate in the Drift Diffusion Model. + the stimulus component of drift rate in the Drift Diffusion Model. 
params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that specifies the parameters for the @@ -4262,6 +4262,7 @@ def __init__(self, noise=0.0, offset: parameter_spec = 0.0, time_step_size=1.0, + t0=0.0, decay = 1.0, initializer=ClassDefaults.variable, params: tc.optional(dict) = None, @@ -4274,6 +4275,7 @@ def __init__(self, time_step_size=time_step_size, decay = decay, initializer=initializer, + t0=t0, noise=noise, offset=offset, params=params) @@ -4289,6 +4291,7 @@ def __init__(self, # Reassign to kWInitializer in case default value was overridden self.previous_value = self.initializer + self.previous_time = self.t0 self.auto_dependent = True @@ -4355,6 +4358,7 @@ def function(self, if not context or not INITIALIZING in context: self.previous_value = adjusted_value + self.previous_time += time_step_size return adjusted_value diff --git a/tests/mechanisms/test_integrator_mechanism.py b/tests/mechanisms/test_integrator_mechanism.py index a9f4e528349..55e65f7c6b1 100644 --- a/tests/mechanisms/test_integrator_mechanism.py +++ b/tests/mechanisms/test_integrator_mechanism.py @@ -119,6 +119,29 @@ def test_ornstein_uhlenbeck_integrator(self): assert (val, val2, val3) == (20.5, 31, 41.5) + def test_ornstein_uhlenbeck_integrator_time(self): + OU = IntegratorMechanism( + function=OrnsteinUhlenbeckIntegrator( + initializer=10.0, + rate=10, + time_step_size=0.2, + t0=0.5, + decay=0.1, + offset=10, + ) + ) + time_0 = OU.function_object.previous_time # t_0 = 0.5 + np.testing.assert_allclose(time_0, [0.5], atol=1e-08) + + OU.execute(10) + time_1 = OU.function_object.previous_time # t_1 = 0.5 + 0.2 = 0.7 + np.testing.assert_allclose(time_1, [0.7], atol=1e-08) + + for i in range(11): # t_11 = 0.7 + 10*0.2 = 2.7 + OU.execute(10) + time_12 = OU.function_object.previous_time # t_12 = 2.7 + 0.2 = 2.9 + np.testing.assert_allclose(time_12, [2.9], atol=1e-08) + def test_integrator_no_function(self): I = IntegratorMechanism(time_scale=TimeScale.TIME_STEP) # P = process(pathway=[I]) From f0d1730fc0b0cc48c4959f8afd3a716c8203c332 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 31 Aug 2017 13:20:28 -0400 Subject: [PATCH 08/69] cleaning up documentation for the rest of the integrator functions, and modifying OU function to match the expression in the DDM paper --- PsyNeuLink/Components/Functions/Function.py | 82 +++++++++++---------- 1 file changed, 43 insertions(+), 39 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 481f23bb178..d28c67ebb47 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -3919,6 +3919,8 @@ class DriftDiffusionIntegrator( scale: parameter_spec = 1.0, \ offset: parameter_spec = 0.0, \ time_step_size=1.0, \ + t0=0.0, \ + decay=0.0, \ initializer, \ params=None, \ owner=None, \ @@ -3927,7 +3929,7 @@ class DriftDiffusionIntegrator( .. _DriftDiffusionIntegrator: - Accumulate evidence overtime based on a stimulus and previous position. + Accumulate evidence overtime based on a stimulus, previous position, and noise. Arguments --------- @@ -3973,8 +3975,7 @@ class DriftDiffusionIntegrator( ---------- variable : number or np.array - current input value some portion of which (determined by `rate `) that will be - added to the prior value; if it is an array, each element is independently integrated. + current input value, which represents the stimulus component of drift. 
rate : float or 1d np.array determines the rate of integration based on current and prior values. If integration_type is set to ADAPTIVE, @@ -4003,7 +4004,8 @@ class DriftDiffusionIntegrator( If initializer is a list or array, it must be the same length as `variable `. previous_time : float - stores previous time at which the function was executed and accumulates according to time_step_size + stores previous time at which the function was executed and accumulates with each execution according to + `time_step_size `. previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. @@ -4082,7 +4084,9 @@ def function(self, time_scale=TimeScale.TRIAL, context=None): """ - Return: previous_value + rate * variable * time_step_size + :math:`\\sqrt{time_step_size * noise}` * random + Return: One time step of evidence accumulation according to the Drift Diffusion Model + + previous_value + rate * variable * time_step_size + :math:`\\sqrt{time_step_size * noise}` * random sample from Normal distribution Arguments @@ -4123,7 +4127,7 @@ def function(self, time_step_size * noise) * np.random.normal() adjusted_value = value + offset - # If this NOT an initialization run, update the old value + # If this NOT an initialization run, update the old value and time # If it IS an initialization run, leave as is # (don't want to count it as an execution step) if not context or not INITIALIZING in context: @@ -4142,6 +4146,7 @@ class OrnsteinUhlenbeckIntegrator( scale: parameter_spec = 1.0, \ offset: parameter_spec = 0.0, \ time_step_size=1.0, \ + t0=0.0, \ initializer, \ params=None, \ owner=None, \ @@ -4150,7 +4155,7 @@ class OrnsteinUhlenbeckIntegrator( .. _OrnsteinUhlenbeckIntegrator: - Integrate current value of `variable ` with its prior value. + Accumulate evidence overtime based on a stimulus, noise, decay, and previous position. Arguments --------- @@ -4172,6 +4177,10 @@ class OrnsteinUhlenbeckIntegrator( determines the timing precision of the integration process (see `time_step_size ` for details. + t0 : float : default 0.0 + represents the starting time of the model and is used to compute + `previous_time ` + initializer float, list or 1d np.array : default 0.0 specifies starting value for integration. If it is a list or array, it must be the same length as `default_variable ` (see `initializer @@ -4194,17 +4203,19 @@ class OrnsteinUhlenbeckIntegrator( ---------- variable : number or np.array - current input value some portion of which (determined by `rate `) that will be - added to the prior value; if it is an array, each element is independently integrated. + current input value which represents the stimulus component of drift. The product of + `variable ` and `rate ` is multiplied + by `time_step_size ` to model the accumulation of evidence during + one step. rate : float or 1d np.array - determines the rate of integration based on current and prior values. If integration_type is set to ADAPTIVE, - all elements must be between 0 and 1 (0 = no change; 1 = instantaneous change). If it has a single element, it - applies to all elements of `variable `; if it has more than one element, each element - applies to the corresponding element of `variable `. + represents the attentional component of drift. The product of `rate ` and + `variable ` is multiplied by + `time_step_size ` to model the accumulation of evidence during + one step. 
noise : float, function, list, or 1d np.array - scales the random value to be added in each call to `function + scales the random value to be added in each call to `function ` Noise must be specified as a float (or list or array of floats) because this value will be used to construct the standard DDM probability distribution. @@ -4215,21 +4226,18 @@ class OrnsteinUhlenbeckIntegrator( initializer : float, 1d np.array or list determines the starting value for integration (i.e., the value to which - `previous_value ` is set. - - If initializer is a list or array, it must be the same length as `variable `. - - TBI: + `previous_value ` is originally set.) - Initializer may be a function or list/array of functions. - - If initializer is specified as a single float or function, while `variable ` is - a list or array, initializer will be applied to each variable element. In the case of an initializer function, - this means that the function will be executed separately for each variable element. + If initializer is a list or array, it must be the same length as `variable + `. previous_value : 1d np.array : default ClassDefaults.variable stores previous value with which `variable ` is integrated. + previous_time : float + stores previous time at which the function was executed and accumulates with each execution according to + `time_step_size `. + owner : Mechanism `component ` to which the Function has been assigned. @@ -4307,14 +4315,18 @@ def function(self, time_scale=TimeScale.TRIAL, context=None): """ - Return: some fraction of `variable ` combined with some fraction of - `previous_value ` + Return: One time step of evidence accumulation according to the Ornstein Uhlenbeck Model + + previous_value + decay * (previous_value - rate * variable) + :math:`\\sqrt{time_step_size * noise}` * random + sample from Normal distribution + Arguments --------- variable : number, list or np.array : default ClassDefaults.variable - a single value or array of values to be integrated. + the stimulus component of drift rate in the Drift Diffusion Model. + params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that specifies the parameters for the @@ -4347,11 +4359,11 @@ def function(self, previous_value = np.atleast_2d(previous_value) new_value = variable - - value = previous_value + decay * rate * new_value * time_step_size + np.sqrt( + # dx = (lambda*x + A)dt + c*dW + value = previous_value + decay * (previous_value - rate * new_value) * time_step_size + np.sqrt( time_step_size * noise) * np.random.normal() - # If this NOT an initialization run, update the old value + # If this NOT an initialization run, update the old value and time # If it IS an initialization run, leave as is # (don't want to count it as an execution step) adjusted_value = value + offset @@ -4396,7 +4408,7 @@ class AccumulatorIntegrator( `variable `. increment : float, list or 1d np.array : default 0.0 - specifies an amount to be added to `prevous_value ` in each call to + specifies an amount to be added to `previous_value ` in each call to `function ` (see `increment ` for details). If it is a list or array, it must be the same length as `variable ` (see `increment ` for details). @@ -4467,14 +4479,6 @@ class AccumulatorIntegrator( ` is set. If initializer is a list or array, it must be the same length as `variable `. - TBI: - - Initializer may be a function or list/array of functions. 
- - If initializer is specified as a single float or function, while `variable ` is - a list or array, initializer will be applied to each variable element. In the case of an initializer function, - this means that the function will be executed separately for each variable element. - previous_value : 1d np.array : default ClassDefaults.variable stores previous value to which `rate ` and `noise ` will be added. From 85d9d21c972763becde149109770c4949294e708 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 31 Aug 2017 17:42:40 -0400 Subject: [PATCH 09/69] continuing to clean up DDM mechanism documention - added a table that shows that categorizes functions based on type (analytic vs integrator) and output state options --- .../Mechanisms/ProcessingMechanisms/DDM.py | 60 ++++++++++++------- 1 file changed, 40 insertions(+), 20 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py index 967d285da13..0a4c1d5f2be 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py @@ -23,22 +23,26 @@ The DDM Mechanism implements the "Drift Diffusion Model" (also know as the Diffusion Decision, Accumulation to Bound, Linear Integrator, and Wiener Process First Passage Time Model [REFS]). This corresponds to a continuous version of the sequential probability ratio test (SPRT [REF]), that is the statistically optimal procedure for two alternative -forced choice (TAFC) decision making ([REF]). It can be executed analytically using one of two solutions (`TRIAL` -mode), or integrated numerically (`integration mode `). +forced choice (TAFC) decision making ([REF]). + +The DDM Mechanism may be constructed with a choice of several functions that fall into to general categories: analytic +solutions and path integration (see `DDM_Modes` below for more about these options.) .. _DDM_Creation: Creating a DDM Mechanism ----------------------------- A DDM Mechanism can be instantiated directly by calling its constructor, or by using the `mechanism` command and -specifying DDM as its **mech_spec** argument. The analytic solution used `analytic mode ` is -selected using the `function ` argument, which can be simply the name of a DDM function (first example -below), or a call to the function with arguments specifying its parameters (second example below; see `DDM_Execution` -for a description of DDM function parameters):: +specifying DDM as its **mech_spec** argument. The model implementation is selected using the `function ` +argument. The function selection can be simply the name of a DDM function:: my_DDM = DDM(function=BogaczEtAl) + +or a call to the function with arguments specifying its parameters:: + my_DDM = DDM(function=BogaczEtAl(drift_rate=0.2, threshold=1.0)) + COMMENT: .. _DDM_Input: **Input**. The `default_variable` argument specifies the default value to use as the stimulus component of the @@ -61,23 +65,39 @@ The DDM Mechanism implements a general form of the decision process. A DDM Mechanism has a single `InputState`, the `value ` of which is assigned to the **input** specified by its `execute ` or `run -` methods, and that is used as the **drift_rate** for the process. That parameter, along with all +` methods, which represents the stimulus for the process. 
The remaining parameters of the decision process must all be assigned as parameters of the DDM's `function `
(see examples under `DDM_Modes` below, and individual `Functions ` for additional details).

The DDM Mechanism can generate two different types of results, depending on which function is selected. When a
function representing an analytic solution is selected, the mechanism generates a single estimate of the outcome of
the decision process. When the path integration function is selected, the mechanism carries out step-wise integration
of the process, and each execution of the mechanism computes one step (see `DDM_Modes` and `DDM_Execution` for
additional details).

The `value ` of the DDM Mechanism may have up to six items. The first two of these are always assigned, and
are represented by the DDM Mechanism's two default `output_states `: `DECISION_VARIABLE
` and `RESPONSE_TIME `. The other `output_states ` may be
assigned depending on (1) whether the selected function produces the corresponding quantities, and (2) how the
**output_states** argument of the constructor is customized, as summarized in the table below.
+ ++---------------------------------+-----------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +|**Function** |**Type** | **Output States** | +| | +------------------------+--------------------+----------------------------------+-----------------------------------+----------------------+--------------------------+ +| | |`DECISION_VARIABLE |`RESPONSE_TIME |`PROBABILITY_UPPER_THRESHOLD |`PROBABILITY_LOWER_THRESHOLD |`RT_CORRECT_MEAN |`RT_CORRECT_VARIANCE | +| | |`|`|`|` |`|`| ++---------------------------------+-----------+------------------------+--------------------+----------------------------------+-----------------------------------+----------------------+--------------------------+ +|`BogaczEtAl ` |Analytic | X | X | X | X | | | ++---------------------------------+-----------+------------------------+--------------------+----------------------------------+-----------------------------------+----------------------+--------------------------+ +|`NavarroAndFuss `|Analytic | X | X | X | X | X | X | ++---------------------------------+-----------+------------------------+--------------------+----------------------------------+-----------------------------------+----------------------+--------------------------+ +|`DriftDiffusionIntegrator |Path | | | | | | | +|` |Integration| X | X | | | | | ++---------------------------------+-----------+------------------------+--------------------+----------------------------------+-----------------------------------+----------------------+--------------------------+ + +The set of `output_states ` assigned can be customized by selecting ones from the DDM's set of +`Standard OutputStates `), and specifying these in the **output_states** argument of its +constructor. Some `OutputStates `, or elements of `value `, represent slightly different quantities +depending on the function in which they are computed. See `Standard OutputStates ` for more +details. .. _DDM_Modes: From 40915aa10b6cb4d2e2fae35ac3160326badb78b9 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 31 Aug 2017 18:09:41 -0400 Subject: [PATCH 10/69] more DDM mechanism documentation cleanup --- .../Mechanisms/ProcessingMechanisms/DDM.py | 76 +++++++++---------- 1 file changed, 36 insertions(+), 40 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py index 0a4c1d5f2be..82f6e18991e 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py @@ -101,22 +101,25 @@ .. _DDM_Modes: -DDM Modes of Operation +DDM Function Types ~~~~~~~~~~~~~~~~~~~~~~ .. _DDM_Analytic_Mode: -Analytic Mode -^^^^^^^^^^^^^ - -This is used when one of the two `Functions ` that calculate an analytic solution -- `BogaczEtAl ` -or `NavarroAndFuss ` -- is specified as the Mechanism's `function `. It generates a -single estimate of the outcome for the decision process (see `DDM_Execution` for details). In addition to -`DECISION_VARIABLE ` and `RESPONSE_TIME `, both Functions return an accuracy -value (represented in the `PROBABILITY_UPPER_THRESHOLD ` OutputState), and an error -rate value (in the `PROBABILITY_LOWER_THRESHOLD ` OutputState; the `NavarroAndFuss -` Function also returns expected values for mean correct response time (`RT_CORRECT_MEAN -` and variance of correct response times (`RT_CORRECT_VARIANCE `. 
+Analytic Solutions +^^^^^^^^^^^^^^^^^^ + +The two Drift Diffusion Model `Functions ` that calculate analytic solutions are `BogaczEtAl ` +and `NavarroAndFuss `. When one of these functions is specified as the DDM Mechanism's +`function `, the mechanism generates a single estimate of the outcome for the decision process (see +`DDM_Execution` for details). + +In addition to `DECISION_VARIABLE ` and `RESPONSE_TIME `, both Functions +return an accuracy value (represented in the `PROBABILITY_UPPER_THRESHOLD ` +OutputState), and an error rate value (in the `PROBABILITY_LOWER_THRESHOLD ` +OutputState; the `NavarroAndFuss ` Function also returns expected values for mean correct response time +(`RT_CORRECT_MEAN ` and variance of correct response times (`RT_CORRECT_VARIANCE `. + Examples for each, that illustrate all of their parameters, are shown below: `BogaczEtAl ` Function:: @@ -126,8 +129,7 @@ threshold=30.0, noise=1.5, t0 = 2.0), - time_scale= TimeScale.TRIAL, - name='MY_DDM_BogaczEtAl') + name='my_DDM_BogaczEtAl') `NavarroAndFuss ` Function:: @@ -136,31 +138,25 @@ threshold=30.0, noise=1.5, t0 = 2.0), - time_scale= TimeScale.TRIAL, - name='MY_DDM_NavarroAndFuss') + name='my_DDM_NavarroAndFuss') .. _DDM_Integration_Mode: -Path Integration Mode -~~~~~~~~~~~~~~~~~~~~~ - -COMMENT: - IS THIS MORE CORRECT FOR THE BELOW: - This is used when `DriftDiffusionIntegrator` is specified as the DDM's `function ` - attribute. -COMMENT +Path Integration +^^^^^^^^^^^^^^^^ -This is used when an `Integrator` Function with an `integration_type ` of *DIFFUSION* is -specified as the DDM's `function ` attribute. In this case, the DDM Mechanism uses the `Euler method -`_ to carry out numerical step-wise integration of the decision process -(see `Execution ` below). In this mode, only the `DECISION_VARIABLE ` and -`RESPONSE_TIME ` are returned by default. +The Drift Diffusion Model `Function ` that calculates a path integration is `DriftDiffusionIntegrator +`. The DDM Mechanism uses the `Euler method `_ to +carry out numerical step-wise integration of the decision process (see `Execution ` below). In this +mode, only the `DECISION_VARIABLE ` and `RESPONSE_TIME ` are available. `Integrator ` Function:: - my_DDM_TimeStep = DDM(function=DriftDiffusionIntegrator(noise=0.5, initializer = 0.0), - time_scale=TimeScale.TIME_STEP, - name='My_DDM_TimeStep') + my_DDM_path_integrator = DDM(function=DriftDiffusionIntegrator(noise=0.5, + initializer = 1.0, + t0 = 2.0, + rate = 3.0), + name='my_DDM_path_integrator') COMMENT: [TBI - MULTIPROCESS DDM - REPLACE ABOVE] @@ -271,14 +267,14 @@ `numerical step-wise integration ` of its path. The method used is determined by its `function ` (see `DDM_Modes`). The DDM's `function ` always returns values for the `DECISION_VARIABLE ` and `RESPONSE_TIME `, and assigns these as the first two items of its `value -` attribute, irrespective of its `mode ` of operation. The mode of operation is determined by -the Function assigned to its `function ` attribute (see `DDM_Structure`). In the `analytic mode -` the same set of values is returned for every execution, that are determined entirely by the set of -parameters passed to its `function `; generally, this corresponds to a `TRIAL` of execution. In the -`path intergration mode `, a single step of integration is conducted each time the Mechanism is -executed; generally, this corresponds to a `TIME_STEP` of execution. 
In addition to `DECISION_VARIABLE -` and `RESPONSE_TIME `, other values are returned by the different modes and -functions (see `DDM_Modes` and `Standard OutputStates `). +` attribute, irrespective of its function. + +When an `analytic ` function is selected, the same set of values is returned for every execution, +that are determined entirely by the set of parameters passed to its `function `; generally, this +corresponds to a `TRIAL` of execution. + +When the `path integration `, function is selected, a single step of integration is conducted each +time the Mechanism is executed; generally, this corresponds to a `TIME_STEP` of execution. .. _DDM_Class_Reference: From f38ae0c23cf316ccf2ace5c98868a7400c840d0e Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 1 Sep 2017 11:10:55 -0400 Subject: [PATCH 11/69] small typos and clarifications in TransferMechanism docs --- .../Mechanisms/ProcessingMechanisms/TransferMechanism.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py index ad657ea63fe..051fcc2d791 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py @@ -86,8 +86,9 @@ * `range `: caps all elements of the `function ` result by the lower and upper values specified by range. .. - * `integrator_mode `: when `integrator_mode ` - is set to True, a TransferMechanism exponentially time-averages its input before transforming it. + * `integrator_mode `: determines whether the input will be time-averaged before + passing through the function of the mechanisms. When `integrator_mode ` is set + to True, the TransferMechanism exponentially time-averages its input before transforming it. .. * `time_constant `: if the `integrator_mode ` attribute is set to True, the `time_constant ` attribute is the rate of @@ -199,7 +200,6 @@ class TransferMechanism(ProcessingMechanism_Base): time_constant=1.0, \ integrator_mode=False, \ range=(float:min, float:max),\ - time_scale=TimeScale.TRIAL, \ params=None, \ name=None, \ prefs=None) @@ -251,7 +251,7 @@ class TransferMechanism(ProcessingMechanism_Base): initial_value : value, list or np.ndarray : default Transfer_DEFAULT_BIAS specifies the starting value for time-averaged input (only relevant if `integrator_mode - ` is True and `time_constant ` is not 1.0). + ` is True). :py:data:`Transfer_DEFAULT_BIAS SHOULD RESOLVE TO VALUE>` noise : float or function : default 0.0 From c208b17d7927fd6f04b58974b85921e007b019bf Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 1 Sep 2017 11:40:51 -0400 Subject: [PATCH 12/69] removing last reference to TimeScale in validation --- .../Mechanisms/ProcessingMechanisms/DDM.py | 41 ++++--------------- 1 file changed, 8 insertions(+), 33 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py index 82f6e18991e..a38e2c10cc2 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py @@ -445,20 +445,20 @@ class DDM(ProcessingMechanism_Base): DDM is a subclass Type of the Mechanism Category of the Component class It implements a Mechanism for several forms of the Drift Diffusion Model (DDM) for two alternative forced choice (2AFC) decision making: - - Bogacz et al. 
(2006) analytic solution (TimeScale.TRIAL mode -- see kwBogaczEtAl option below): + - Bogacz et al. (2006) analytic solution: generates error rate (ER) and decision time (DT); ER is used to stochastically generate a decision outcome (+ or - valued) on every run - - Navarro and Fuss (2009) analytic solution (TImeScale.TRIAL mode -- see kwNavarrosAndFuss: + - Navarro and Fuss (2009) analytic solution: generates error rate (ER), decision time (DT) and their distributions; ER is used to stochastically generate a decision outcome (+ or - valued) on every run - - stepwise integrator that simulates each step of the integration process (TimeScale.TIME_STEP mode) + - stepwise integrator that simulates each step of the integration process Class attributes ---------------- + componentType (str): DDM + classPreference (PreferenceSet): DDM_PreferenceSet, instantiated in __init__() + classPreferenceLevel (PreferenceLevel): PreferenceLevel.TYPE + ClassDefaults.variable (value): STARTING_POINT - + paramClassDefaults (dict): {TIME_SCALE: TimeScale.TRIAL, + + paramClassDefaults (dict): { kwDDM_AnalyticSolution: kwBogaczEtAl, FUNCTION_PARAMS: {DRIFT_RATE:<> STARTING_POINT:<> @@ -498,14 +498,6 @@ class DDM(ProcessingMechanism_Base): specifies the function to use to `execute ` the decision process; determines the mode of execution (see `function ` and `DDM_Modes` for additional information). - COMMENT: - time_scale : TimeScale : default TimeScale.TRIAL - specifies whether the Mechanism is executed on the time_step or trial time scale. - This must be set to `TimeScale.TRIAL` to use one of the analytic solutions specified by - `function `. This must be set to `TimeScale.TIME_STEP` to numerically (path) integrate the - decision variable. - COMMENT - params : Optional[Dict[param keyword, param value]] a dictionary that can be used to specify parameters of the Mechanism, parameters of its `function `, and/or a custom function and its parameters (see `Mechanism ` for specification of @@ -567,11 +559,6 @@ class DDM(ProcessingMechanism_Base): **output_states** argument of the DDM's constructor (see `DDM Standard OutputStates `). - COMMENT: - time_scale : TimeScale : default TimeScale.TRIAL - determines the `TimeScale` at which the decision process is executed. - COMMENT - name : str : default DDM- the name of the Mechanism. Specified in the name argument of the call to create the projection; @@ -816,21 +803,11 @@ def _validate_params(self, request_set, target_set=None, context=None): raise DDMError("{} param of {} must be one of the following functions: {}". format(FUNCTION, self.name, function_names)) - if self.timeScale == TimeScale.TRIAL: - if function == Integrator: - raise DDMError("In TRIAL mode, the {} param of {} cannot be Integrator. Please choose an analytic " - "solution for the function param: BogaczEtAl or NavarroAndFuss.". - format(FUNCTION, self.name)) - else: - if function != DriftDiffusionIntegrator: - raise DDMError("In TIME_STEP mode, the {} param of {} " - "must be DriftDiffusionIntegrator.". 
- format(FUNCTION, self.name)) - else: - self.get_axes_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], - noise=self.function_params['noise'], context='plot').function - self.plot_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], + if isinstance(function, DriftDiffusionIntegrator): + self.get_axes_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], noise=self.function_params['noise'], context='plot').function + self.plot_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], + noise=self.function_params['noise'], context='plot').function if not isinstance(function, NavarroAndFuss) and OUTPUT_STATES in target_set: # OUTPUT_STATES is a list, so need to delete the first, so that the index doesn't go out of range @@ -883,7 +860,6 @@ def _execute(self, + kwDDM_Bias (float) + NON_DECISION_TIME (float) + NOISE (float) - - time_scale (TimeScale): specifies "temporal granularity" with which Mechanism is executed - context (str) Returns the following values in self.value (2D np.array) and in the value of the corresponding outputState in the self.outputStates dict: @@ -895,7 +871,6 @@ def _execute(self, :param self: :param variable (float) :param params: (dict) - :param time_scale: (TimeScale) :param context: (str) :rtype self.outputState.value: (number) """ From 2e8610a472499d8d5b73b8d27de40eb8a228cff8 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 1 Sep 2017 11:42:09 -0400 Subject: [PATCH 13/69] removing any references to TimeScale in Mechanism documentation in order to not confuse readers (at the moment, TimeScale still gets passed throughout component heirarchy, but it is not used) --- .../CompositionInterfaceMechanism.py | 8 -------- .../ProcessingMechanisms/IntegratorMechanism.py | 11 +---------- .../Mechanisms/ProcessingMechanisms/KWTA.py | 17 ++++------------- .../Mechanisms/ProcessingMechanisms/LCA.py | 17 ++++------------- .../ObjectiveMechanisms/ComparatorMechanism.py | 10 +--------- .../ObjectiveMechanisms/ObjectiveMechanism.py | 10 +--------- .../RecurrentTransferMechanism.py | 17 ++++------------- .../ProcessingMechanisms/TransferMechanism.py | 3 --- 8 files changed, 15 insertions(+), 78 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/CompositionInterfaceMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/CompositionInterfaceMechanism.py index 7436a15b428..730e6382342 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/CompositionInterfaceMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/CompositionInterfaceMechanism.py @@ -61,7 +61,6 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): default_input_value=None, \ size=None, \ function=Linear(slope = 1.0, intercept = 0.0), \ - time_scale=TimeScale.TRIAL, \ params=None, \ name=None, \ prefs=None) @@ -86,10 +85,6 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): specifies the function used to integrate the input. Must take a single numeric value, or a list or np.array of values, and return one of the same form. - time_scale : TimeScale : TimeScale.TRIAL - specifies whether the Mechanism is executed on the `TIME_STEP` or `TRIAL` time scale. - This must be set to `TimeScale.TIME_STEP` for the :keyword:`rate` parameter to have an effect. 
- params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that can be used to specify the parameters for the `Mechanism `, parameters for its `function `, and/or a @@ -111,9 +106,6 @@ class CompositionInterfaceMechanism(ProcessingMechanism_Base): variable : value: default the input to Mechanism's ``function``. - time_scale : TimeScale : defaultTimeScale.TRIAL - specifies whether the Mechanism is executed on the TIME_STEP or TRIAL time scale. - name : str : default CompositionInterfaceMechanism- the name of the Mechanism. Specified in the **name** argument of the constructor for the Mechanism; diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/IntegratorMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/IntegratorMechanism.py index 6c80e83a068..890f07f240d 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/IntegratorMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/IntegratorMechanism.py @@ -92,7 +92,6 @@ class IntegratorMechanism(ProcessingMechanism_Base): default_variable=None, \ size=None, \ function=AdaptiveIntegrator(rate=0.5), \ - time_scale=TimeScale.TRIAL, \ params=None, \ name=None, \ prefs=None) @@ -108,8 +107,7 @@ class IntegratorMechanism(ProcessingMechanism_Base): + classPreference (PreferenceSet): SigmoidLayer_PreferenceSet, instantiated in __init__() + classPreferenceLevel (PreferenceLevel): PreferenceLevel.TYPE + ClassDefaults.variable (value): SigmoidLayer_DEFAULT_BIAS - + paramClassDefaults (dict): {TIME_SCALE: TimeScale.TRIAL, - FUNCTION_PARAMS:{kwSigmoidLayer_Unitst: kwSigmoidLayer_NetInput + + paramClassDefaults (dict): {FUNCTION_PARAMS:{kwSigmoidLayer_Unitst: kwSigmoidLayer_NetInput kwSigmoidLayer_Gain: SigmoidLayer_DEFAULT_GAIN kwSigmoidLayer_Bias: SigmoidLayer_DEFAULT_BIAS}} Class methods: @@ -139,10 +137,6 @@ class IntegratorMechanism(ProcessingMechanism_Base): specifies the function used to integrate the input. Must take a single numeric value, or a list or np.array of values, and return one of the same form. - time_scale : TimeScale : TimeScale.TRIAL - specifies whether the Mechanism is executed on the `TIME_STEP` or `TRIAL` time scale. - This must be set to `TimeScale.TIME_STEP` for the :keyword:`rate` parameter to have an effect. - params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that can be used to specify the parameters for the Mechanism, parameters for its `function `, and/or a custom function and its @@ -164,9 +158,6 @@ class IntegratorMechanism(ProcessingMechanism_Base): variable : value: default the input to Mechanism's ``function``. - time_scale : TimeScale : defaultTimeScale.TRIAL - specifies whether the Mechanism is executed on the TIME_STEP or TRIAL time scale. - name : str : default IntegratorMechanism- the name of the Mechanism. Specified in the **name** argument of the constructor for the Mechanism; diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/KWTA.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/KWTA.py index ed8f95c196b..83abc98aadd 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/KWTA.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/KWTA.py @@ -130,7 +130,6 @@ class KWTA(RecurrentTransferMechanism): inhibition_only=True, \ average_based=False, \ range=None, \ - time_scale=TimeScale.TRIAL, \ params=None, \ name=None, \ prefs=None) @@ -190,8 +189,8 @@ class KWTA(RecurrentTransferMechanism): if it is a function, it must return a scalar value. 
time_constant : float : default 1.0 - the time constant for exponential time averaging of input when the mechanism is executed with `time_scale` - set to `TimeScale.TIME_STEP`:: + the time constant for exponential time averaging of input when `integrator_mode ` is set + to True :: result = (time_constant * current input) + (1-time_constant * result on previous time_step) @@ -239,11 +238,6 @@ class KWTA(RecurrentTransferMechanism): the mechanism, its function, and/or a custom function and its parameters. Values specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. - time_scale : TimeScale : TimeScale.TRIAL - specifies whether the mechanism is executed using the `TIME_STEP` or `TRIAL` `TimeScale`. - This must be set to `TimeScale.TIME_STEP` for the `time_constant ` - parameter to have an effect. - name : str : default KWTA- a string used for the name of the mechanism. If not is specified, a default is assigned by `MechanismRegistry` @@ -291,8 +285,8 @@ class KWTA(RecurrentTransferMechanism): if it is a function, it must return a scalar value. time_constant : float - the time constant for exponential time averaging of input - when the Mechanism is executed using the `TIME_STEP` `TimeScale`:: + the time constant for exponential time averaging of input when `integrator_mode ` is set + to True:: result = (time_constant * current input) + (1-time_constant * result on previous time_step) @@ -359,9 +353,6 @@ class KWTA(RecurrentTransferMechanism): * **energy** of the result (``value`` of ENERGY outputState); * **entropy** of the result (if the ENTROPY outputState is present). - time_scale : TimeScale - specifies whether the mechanism is executed using the `TIME_STEP` or `TRIAL` `TimeScale`. - name : str : default KWTA- the name of the Mechanism. Specified in the **name** argument of the constructor for the Projection; diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/LCA.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/LCA.py index 5336ac804eb..12d41aed8e5 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/LCA.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/LCA.py @@ -181,7 +181,6 @@ class LCA(RecurrentTransferMechanism): noise=0.0, \ time_constant=1.0, \ range=(float:min, float:max), \ - time_scale=TimeScale.TIME_STEP, \ params=None, \ name=None, \ prefs=None) @@ -234,8 +233,8 @@ class LCA(RecurrentTransferMechanism): if it is a function, it must return a scalar value. time_constant : float : default 1.0 - the time constant for exponential time averaging of input when the Mechanism is executed with `time_scale` - set to `TimeScale.TIME_STEP` + the time constant for exponential time averaging of input when `integrator_mode ` is set + to True:: `result = (time_constant * current input) + (1-time_constant * result on previous time_step)` @@ -250,11 +249,6 @@ class LCA(RecurrentTransferMechanism): the Mechanism, its function, and/or a custom function and its parameters. Values specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. - time_scale : TimeScale : TimeScale.TRIAL - specifies whether the Mechanism is executed using the `TIME_STEP` or `TRIAL` `TimeScale`. - This must be set to `TimeScale.TIME_STEP` for the `time_constant ` - parameter to have an effect. - name : str : default TransferMechanism- a string used for the name of the Mechanism. 
If not is specified, a default is assigned by `MechanismRegistry` @@ -308,8 +302,8 @@ class LCA(RecurrentTransferMechanism): if it is a function, it must return a scalar value. time_constant : float - the time constant for exponential time averaging of input - when the Mechanism is executed using the `TIME_STEP` `TimeScale`:: + the time constant for exponential time averaging of input when `integrator_mode ` is set + to True:: result = (time_constant * current input) + (1-time_constant * result on previous time_step) @@ -362,9 +356,6 @@ class LCA(RecurrentTransferMechanism): * **max_vs_next** of the result (:keyword:`value` of MAX_VS_NEXT OutputState); * **max_vs_avg** of the result (:keyword:`value` of MAX_VS_AVG OutputState). - time_scale : TimeScale - specifies whether the Mechanism is executed using the `TIME_STEP` or `TRIAL` `TimeScale`. - name : str : default TransferMechanism- the name of the Mechanism. Specified in the **name** argument of the constructor for the projection; diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py index b5d835e7f04..4f6ca4e8214 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py @@ -199,8 +199,7 @@ class ComparatorMechanism(ObjectiveMechanism): + classPreference (PreferenceSet): Comparator_PreferenceSet, instantiated in __init__() + classPreferenceLevel (PreferenceLevel): PreferenceLevel.SUBTYPE + ClassDefaults.variable (value): Comparator_DEFAULT_STARTING_POINT // QUESTION: What to change here - + paramClassDefaults (dict): {TIME_SCALE: TimeScale.TRIAL, - FUNCTION_PARAMS:{COMPARISON_OPERATION: SUBTRACTION}} + + paramClassDefaults (dict): {FUNCTION_PARAMS:{COMPARISON_OPERATION: SUBTRACTION}} Class methods: None @@ -237,13 +236,6 @@ class ComparatorMechanism(ObjectiveMechanism): the dictionary override any assigned to those parameters in arguments of the constructor. - COMMENT: - [TBI] - time_scale : TimeScale : TimeScale.TRIAL - specifies whether the Mechanism is executed on the TIME_STEP or TRIAL time scale. - This must be set to :keyword:`TimeScale.TIME_STEP` for the ``rate`` parameter to have an effect. - COMMENT - name: str : default ComparatorMechanism- a string used for the name of the Mechanism. 
If not is specified, a default is assigned by `MechanismRegistry` diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ObjectiveMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ObjectiveMechanism.py index 272bd2b9b66..f062d45fdf9 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ObjectiveMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ObjectiveMechanism.py @@ -315,8 +315,7 @@ class ObjectiveMechanism(ProcessingMechanism_Base): + classPreference (PreferenceSet): Comparator_PreferenceSet, instantiated in __init__() + classPreferenceLevel (PreferenceLevel): PreferenceLevel.SUBTYPE + ClassDefaults.variable (value): Comparator_DEFAULT_STARTING_POINT // QUESTION: What to change here - + paramClassDefaults (dict): {TIME_SCALE: TimeScale.TRIAL, - FUNCTION_PARAMS:{COMPARISON_OPERATION: SUBTRACTION}} + + paramClassDefaults (dict): {FUNCTION_PARAMS:{COMPARISON_OPERATION: SUBTRACTION}} Class methods: None @@ -362,13 +361,6 @@ class ObjectiveMechanism(ProcessingMechanism_Base): the dictionary override any assigned to those parameters in arguments of the constructor. - COMMENT: - [TBI] - time_scale : TimeScale : TimeScale.TRIAL - specifies whether the Mechanism is executed on the TIME_STEP or TRIAL time scale. - This must be set to :keyword:`TimeScale.TIME_STEP` for the ``rate`` parameter to have an effect. - COMMENT - name : str : default ObjectiveMechanism- a string used for the name of the Mechanism. If not is specified, a default is assigned by `MechanismRegistry` diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/RecurrentTransferMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/RecurrentTransferMechanism.py index bdc89dfe9a9..b6f40c516fc 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/RecurrentTransferMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/RecurrentTransferMechanism.py @@ -187,7 +187,6 @@ class RecurrentTransferMechanism(TransferMechanism): noise=0.0, \ time_constant=1.0, \ range=(float:min, float:max), \ - time_scale=TimeScale.TRIAL, \ params=None, \ name=None, \ prefs=None) @@ -257,8 +256,8 @@ class RecurrentTransferMechanism(TransferMechanism): if it is a function, it must return a scalar value. time_constant : float : default 1.0 - the time constant for exponential time averaging of input when the Mechanism is executed with `time_scale` - set to `TimeScale.TIME_STEP`:: + the time constant for exponential time averaging of input when `integrator_mode + ` is set to True:: result = (time_constant * current input) + (1-time_constant * result on previous time_step) @@ -274,11 +273,6 @@ class RecurrentTransferMechanism(TransferMechanism): the Mechanism, its function, and/or a custom function and its parameters. Values specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. - time_scale : TimeScale : TimeScale.TRIAL - specifies whether the Mechanism is executed using the `TIME_STEP` or `TRIAL` `TimeScale`. - This must be set to `TimeScale.TIME_STEP` for the `time_constant ` - parameter to have an effect. - name : str : default RecurrentTransferMechanism- a string used for the name of the Mechanism. If not is specified, a default is assigned by `MechanismRegistry` @@ -326,8 +320,8 @@ class RecurrentTransferMechanism(TransferMechanism): if it is a function, it must return a scalar value. 
time_constant : float - the time constant for exponential time averaging of input - when the Mechanism is executed using the `TIME_STEP` `TimeScale`:: + the time constant for exponential time averaging of input when `integrator_mode + ` is set to True:: result = (time_constant * current input) + (1-time_constant * result on previous time_step) @@ -374,9 +368,6 @@ class RecurrentTransferMechanism(TransferMechanism): * **energy** of the result (``value`` of ENERGY outputState); * **entropy** of the result (if the ENTROPY outputState is present). - time_scale : TimeScale - specifies whether the Mechanism is executed using the `TIME_STEP` or `TRIAL` `TimeScale`. - name : str : default RecurrentTransferMechanism- the name of the Mechanism. Specified in the **name** argument of the constructor for the Projection; diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py index 051fcc2d791..5b2808a75a2 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/TransferMechanism.py @@ -219,7 +219,6 @@ class TransferMechanism(ProcessingMechanism_Base): + classPreference (PreferenceSet): Transfer_PreferenceSet, instantiated in __init__() + classPreferenceLevel (PreferenceLevel): PreferenceLevel.SUBTYPE + ClassDefaults.variable (value): Transfer_DEFAULT_BIAS - + paramClassDefaults (dict): {TIME_SCALE: TimeScale.TRIAL} Class methods ------------- @@ -631,7 +630,6 @@ def _execute(self, + NOISE (float) + TIME_CONSTANT (float) + RANGE ([float, float]) - - time_scale (TimeScale): specifies "temporal granularity" with which Mechanism is executed - context (str) Returns the following values in self.value (2D np.array) and in @@ -643,7 +641,6 @@ def _execute(self, :param self: :param variable (float) :param params: (dict) - :param time_scale: (TimeScale) :param context: (str) :rtype self.outputState.value: (number) """ From 8b704c5fbe7b48f22575b25cc8a99891e8a8f622 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 1 Sep 2017 12:08:20 -0400 Subject: [PATCH 14/69] updating OU pytests to match recent changes to OU integrator function --- tests/mechanisms/test_integrator_mechanism.py | 47 ++++++++++--------- 1 file changed, 25 insertions(+), 22 deletions(-) diff --git a/tests/mechanisms/test_integrator_mechanism.py b/tests/mechanisms/test_integrator_mechanism.py index 55e65f7c6b1..321f12ca366 100644 --- a/tests/mechanisms/test_integrator_mechanism.py +++ b/tests/mechanisms/test_integrator_mechanism.py @@ -86,38 +86,36 @@ def test_drift_diffusion_integrator(self): def test_ornstein_uhlenbeck_integrator(self): I = IntegratorMechanism( function=OrnsteinUhlenbeckIntegrator( + decay=0.5, initializer=10.0, - rate=10, + rate=0.25, time_step_size=0.5, - decay=0.1, - offset=10, + noise = 0.0, + offset= 1.0 ) ) # P = process(pathway=[I]) - # value = previous_value + decay * rate * new_value * time_step_size + np.sqrt( + # value = previous_value + decay * (previous_value - rate * new_value) * time_step_size + np.sqrt( # time_step_size * noise) * np.random.normal() # step 1: + val = I.execute(1) - # value = 10 + 0.1*10*1*0.5 + 0 - # adjusted_value = 10.5 + 10 - # previous_value = 20.5 - # RETURN 20.5 + # value = 10 + 0.5 * ( 10.0 - 0.25*1.0) * 0.5 + sqrt(0.25*0)*random_sample + # = 10 + 0.5*9.75*0.5 + # = 12.4375 + # adjusted_value = 12.4375 + 1.0 + # previous_value = 13.4375 + # RETURN 13.4375 # step 2: val2 = I.execute(1) - 
# value = 20.5 + 0.1*10*1*0.5 + 0 - # adjusted_value = 21 + 10 - # previous_value = 31 + # value = 13.4375 + 0.5 * ( 13.4375 - 0.25*1.0) * 0.5 + # = 13.4375 + 3.296875 + # adjusted_value = 16.734375 + 1.0 + # previous_value = 17.734375 # RETURN 31 - # step 3: - val3 = I.execute(1) - # value = 31 + 0.1*10*1*0.5 + 0 - # adjusted_value = 31.5 + 10 - # previous_value = 41.5 - # RETURN 41.5 - - assert (val, val2, val3) == (20.5, 31, 41.5) + assert (val, val2) == (13.4375, 17.734375) def test_ornstein_uhlenbeck_integrator_time(self): OU = IntegratorMechanism( @@ -662,11 +660,16 @@ def test_integrator_ornstein_uhlenbeck_noise_val(self): I = IntegratorMechanism( name='IntegratorMechanism', function=OrnsteinUhlenbeckIntegrator( - noise=5.0, + noise=2.0, + decay=0.5, + initializer=1.0, + rate=0.25 ), time_scale=TimeScale.TIME_STEP ) - val = float(I.execute(10)) + # val = 1.0 + 0.5 * (1.0 - 0.25 * 2.5) * 1.0 + np.sqrt(1.0 * 2.0) * np.random.normal() - np.testing.assert_allclose(val, 15.010789523731438) + val = float(I.execute(2.5)) + + np.testing.assert_allclose(val, 4.356601554140335) From 705340861d492e15c82931ea21b95012c231119a Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 1 Sep 2017 13:36:41 -0400 Subject: [PATCH 15/69] moving DDM plotting functions out of validation; cleaning up validation that overwrote 'function' built-in name --- .../Mechanisms/ProcessingMechanisms/DDM.py | 65 +++++-------------- 1 file changed, 15 insertions(+), 50 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py index a38e2c10cc2..0d02c42ce7b 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py @@ -663,7 +663,7 @@ def __init__(self, size=size, # context=context) context=self) - + self._instantiate_plotting_functions() # # TEST PRINT # print("\n{} user_params:".format(self.name)) # for param in self.user_params.keys(): @@ -729,44 +729,6 @@ def plot(self, stimulus=1.0, threshold=10.0): # number of seconds to wait before next point is plotted time.sleep(.1) - - # - # import matplotlib.pyplot as plt - # plt.ion() - # - # # # Select a random seed to ensure that the test run will be the same as the real run - # seed_value = np.random.randint(0, 100) - # np.random.seed(seed_value) - # variable = stimulus - # - # result_check = 0 - # time_check = 0 - # - # while abs(result_check) < threshold: - # time_check += 1 - # result_check = self.get_axes_function(variable, context='plot') - # - # # Re-set random seed for the real run - # np.random.seed(seed_value) - # axes = plt.gca() - # axes.set_xlim([0, time_check]) - # axes.set_xlabel("Time Step", weight="heavy", size="large") - # axes.set_ylim([-1.25 * threshold, 1.25 * threshold]) - # axes.set_ylabel("Position", weight="heavy", size="large") - # plt.axhline(y=threshold, linewidth=1, color='k', linestyle='dashed') - # plt.axhline(y=-threshold, linewidth=1, color='k', linestyle='dashed') - # plt.plot() - # - # result = 0 - # time = 0 - # while abs(result) < threshold: - # time += 1 - # result = self.plot_function(variable, context='plot') - # plt.plot(time, float(result), '-o', color='r', ms=2.5) - # plt.pause(0.01) - # - # plt.pause(10000) - # MODIFIED 11/21/16 NEW: def _validate_variable(self, variable, context=None): """Ensures that input to DDM is a single value. 
@@ -794,21 +756,15 @@ def _validate_params(self, request_set, target_set=None, context=None): if FUNCTION in target_set: # If target_set[FUNCTION] is a method of a Function (e.g., being assigned in _instantiate_function), # get the Function to which it belongs - function = target_set[FUNCTION] - if isinstance(function, method_type): - function = function.__self__.__class__ + fun = target_set[FUNCTION] + if isinstance(fun, method_type): + fun = fun.__self__.__class__ - if not function in functions: - function_names = [function.componentName for function in functions] + if not fun in functions: + function_names = [function.componentName for fun in functions] raise DDMError("{} param of {} must be one of the following functions: {}". format(FUNCTION, self.name, function_names)) - if isinstance(function, DriftDiffusionIntegrator): - self.get_axes_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], - noise=self.function_params['noise'], context='plot').function - self.plot_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], - noise=self.function_params['noise'], context='plot').function - if not isinstance(function, NavarroAndFuss) and OUTPUT_STATES in target_set: # OUTPUT_STATES is a list, so need to delete the first, so that the index doesn't go out of range # if DDM_OUTPUT_INDEX.RT_CORRECT_VARIANCE.value in target_set[OUTPUT_STATES]: @@ -837,6 +793,15 @@ def _instantiate_attributes_before_function(self, context=None): super()._instantiate_attributes_before_function(context=context) + def _instantiate_plotting_functions(self, context=None): + if "DriftDiffusionIntegrator" in str(self.function): + self.get_axes_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], + noise=self.function_params['noise'], + context='plot').function + self.plot_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], + noise=self.function_params['noise'], context='plot').function + + def _execute(self, variable=None, runtime_params=None, From 735c923d3a53a163c11c3dbded2a8d5d14baa990 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 1 Sep 2017 13:50:41 -0400 Subject: [PATCH 16/69] one more typo in cleaning up validation that overwrote 'function' built-in name --- PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py index 0d02c42ce7b..2878990d3d1 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py @@ -765,7 +765,7 @@ def _validate_params(self, request_set, target_set=None, context=None): raise DDMError("{} param of {} must be one of the following functions: {}". 
format(FUNCTION, self.name, function_names)) - if not isinstance(function, NavarroAndFuss) and OUTPUT_STATES in target_set: + if not isinstance(fun, NavarroAndFuss) and OUTPUT_STATES in target_set: # OUTPUT_STATES is a list, so need to delete the first, so that the index doesn't go out of range # if DDM_OUTPUT_INDEX.RT_CORRECT_VARIANCE.value in target_set[OUTPUT_STATES]: # del target_set[OUTPUT_STATES][DDM_OUTPUT_INDEX.RT_CORRECT_VARIANCE.value] From 6dfa5f4b1a1fa5fae5d6b873b40e774252b51ca0 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 1 Sep 2017 14:15:37 -0400 Subject: [PATCH 17/69] more documentation changes --- PsyNeuLink/Components/Functions/Function.py | 8 ++++--- .../Mechanisms/ProcessingMechanisms/DDM.py | 3 ++- docs/source/AdaptiveMechanism.rst | 2 ++ tests/mechanisms/test_ddm_mechanism.py | 21 ++++++++++++++----- 4 files changed, 25 insertions(+), 9 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index d28c67ebb47..6672a78da98 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4123,9 +4123,11 @@ def function(self, previous_value = np.atleast_2d(previous_value) new_value = variable - value = previous_value + rate * new_value * time_step_size + np.sqrt( - time_step_size * noise) * np.random.normal() - + value = previous_value + rate * new_value * time_step_size \ + # + np.sqrt( + # time_step_size * noise) * np.random.normal() + print("noise = ", np.sqrt(0.5) * np.random.normal()) + print() adjusted_value = value + offset # If this NOT an initialization run, update the old value and time # If it IS an initialization run, leave as is diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py index 2878990d3d1..5b3513d2013 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/DDM.py @@ -799,7 +799,8 @@ def _instantiate_plotting_functions(self, context=None): noise=self.function_params['noise'], context='plot').function self.plot_function = DriftDiffusionIntegrator(rate=self.function_params['rate'], - noise=self.function_params['noise'], context='plot').function + noise=self.function_params['noise'], + context='plot').function def _execute(self, diff --git a/docs/source/AdaptiveMechanism.rst b/docs/source/AdaptiveMechanism.rst index 6e86f7a5f77..3c03d646d18 100644 --- a/docs/source/AdaptiveMechanism.rst +++ b/docs/source/AdaptiveMechanism.rst @@ -1,3 +1,5 @@ + + Adaptive Mechanisms =================== diff --git a/tests/mechanisms/test_ddm_mechanism.py b/tests/mechanisms/test_ddm_mechanism.py index 23af64c8d42..5dbe8a61965 100644 --- a/tests/mechanisms/test_ddm_mechanism.py +++ b/tests/mechanisms/test_ddm_mechanism.py @@ -97,13 +97,25 @@ def test_DDM_noise_0_5(): T = DDM( name='DDM', function=DriftDiffusionIntegrator( - noise=0.5, + noise=0.0, rate=1.0, time_step_size=1.0 - ), - time_scale=TimeScale.TIME_STEP + ) ) + print(np.sqrt(0.5) * np.random.normal()) + print(np.sqrt(0.5) * np.random.normal()) + print(np.sqrt(0.5) * np.random.normal()) + print(np.sqrt(0.5) * np.random.normal()) + print(np.sqrt(0.5) * np.random.normal()) val = float(T.execute(stim)[0]) + + print(np.sqrt(0.5) * np.random.normal()) + print(np.sqrt(0.5) * np.random.normal()) + print(np.sqrt(0.5) * np.random.normal()) + print(np.sqrt(0.5) * np.random.normal()) + print(np.sqrt(0.5) * np.random.normal()) + + assert val == 
9.892974291631234 # ------------------------------------------------------------------------------------------------ @@ -119,8 +131,7 @@ def test_DDM_noise_2_0(): noise=2.0, rate=1.0, time_step_size=1.0 - ), - time_scale=TimeScale.TIME_STEP + ) ) val = float(T.execute(stim)[0]) assert val == 9.785948583262465 From 8d38f22a1e1c443263ef599ad43689bea825e944 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Fri, 1 Sep 2017 18:02:45 -0400 Subject: [PATCH 18/69] updating tests to match ddm -- random seeds were disrupted by changes --- PsyNeuLink/Components/Functions/Function.py | 8 +++----- tests/mechanisms/test_ddm_mechanism.py | 19 ++++--------------- 2 files changed, 7 insertions(+), 20 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 6672a78da98..bfab5597c21 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4123,11 +4123,9 @@ def function(self, previous_value = np.atleast_2d(previous_value) new_value = variable - value = previous_value + rate * new_value * time_step_size \ - # + np.sqrt( - # time_step_size * noise) * np.random.normal() - print("noise = ", np.sqrt(0.5) * np.random.normal()) - print() + value = previous_value + rate * new_value * time_step_size \ + + np.sqrt(time_step_size * noise) * np.random.normal() + adjusted_value = value + offset # If this NOT an initialization run, update the old value and time # If it IS an initialization run, leave as is diff --git a/tests/mechanisms/test_ddm_mechanism.py b/tests/mechanisms/test_ddm_mechanism.py index 5dbe8a61965..685a9f724aa 100644 --- a/tests/mechanisms/test_ddm_mechanism.py +++ b/tests/mechanisms/test_ddm_mechanism.py @@ -97,26 +97,15 @@ def test_DDM_noise_0_5(): T = DDM( name='DDM', function=DriftDiffusionIntegrator( - noise=0.0, + noise=0.5, rate=1.0, time_step_size=1.0 ) ) - print(np.sqrt(0.5) * np.random.normal()) - print(np.sqrt(0.5) * np.random.normal()) - print(np.sqrt(0.5) * np.random.normal()) - print(np.sqrt(0.5) * np.random.normal()) - print(np.sqrt(0.5) * np.random.normal()) - val = float(T.execute(stim)[0]) - - print(np.sqrt(0.5) * np.random.normal()) - print(np.sqrt(0.5) * np.random.normal()) - print(np.sqrt(0.5) * np.random.normal()) - print(np.sqrt(0.5) * np.random.normal()) - print(np.sqrt(0.5) * np.random.normal()) + val = float(T.execute(stim)[0]) - assert val == 9.892974291631234 + assert val == 9.308960184035778 # ------------------------------------------------------------------------------------------------ # TEST 3 @@ -134,7 +123,7 @@ def test_DDM_noise_2_0(): ) ) val = float(T.execute(stim)[0]) - assert val == 9.785948583262465 + assert val == 8.617920368071555 # ------------------------------------------------------------------------------------------------ From 8818a6f0fadb3048a6a5760db5ba0dd45eb26d9f Mon Sep 17 00:00:00 2001 From: jdcpni Date: Sat, 2 Sep 2017 08:11:25 -0400 Subject: [PATCH 19/69] - (#439) --- docs/source/ControlProjections.rst | 15 +++++++++++++++ docs/source/GatingMechanisms.rst | 2 ++ docs/source/GatingProjections.rst | 15 +++++++++++++++ docs/source/LearningMechanisms.rst | 2 ++ docs/source/LearningProjections.rst | 15 +++++++++++++++ docs/source/ModulatoryProjections.rst | 4 ++++ docs/source/PathwayProjections.rst | 1 + 7 files changed, 54 insertions(+) create mode 100644 docs/source/ControlProjections.rst create mode 100644 docs/source/GatingProjections.rst create mode 100644 docs/source/LearningProjections.rst diff --git 
a/docs/source/ControlProjections.rst b/docs/source/ControlProjections.rst new file mode 100644 index 00000000000..b8ce6c9fcca --- /dev/null +++ b/docs/source/ControlProjections.rst @@ -0,0 +1,15 @@ +ControlProjections +================== + +**Base class**: + +* `ControlProjection` + +**Subtypes**: + +.. _ControlProjection_Subtypes: + +.. toctree:: + :maxdepth: 1 + +[None] \ No newline at end of file diff --git a/docs/source/GatingMechanisms.rst b/docs/source/GatingMechanisms.rst index dc95bde27d5..3026b95c7a4 100644 --- a/docs/source/GatingMechanisms.rst +++ b/docs/source/GatingMechanisms.rst @@ -11,3 +11,5 @@ GatingMechanisms .. toctree:: :maxdepth: 1 + +[None] \ No newline at end of file diff --git a/docs/source/GatingProjections.rst b/docs/source/GatingProjections.rst new file mode 100644 index 00000000000..58dd5cc44d3 --- /dev/null +++ b/docs/source/GatingProjections.rst @@ -0,0 +1,15 @@ +GatingProjections +================== + +**Base class**: + +* `GatingProjection` + +**Subtypes**: + +.. _GatingProjection_Subtypes: + +.. toctree:: + :maxdepth: 1 + +[None] \ No newline at end of file diff --git a/docs/source/LearningMechanisms.rst b/docs/source/LearningMechanisms.rst index 3ec1183e036..ceab1b97389 100644 --- a/docs/source/LearningMechanisms.rst +++ b/docs/source/LearningMechanisms.rst @@ -11,3 +11,5 @@ LearningMechanisms .. toctree:: :maxdepth: 1 + +[None] \ No newline at end of file diff --git a/docs/source/LearningProjections.rst b/docs/source/LearningProjections.rst new file mode 100644 index 00000000000..c4245e093f6 --- /dev/null +++ b/docs/source/LearningProjections.rst @@ -0,0 +1,15 @@ +LearningProjections +=================== + +**Base class**: + +* `LearningProjection` + +**Subtypes**: + +.. _LearningProjection_Subtypes: + +.. toctree:: + :maxdepth: 1 + +[None] \ No newline at end of file diff --git a/docs/source/ModulatoryProjections.rst b/docs/source/ModulatoryProjections.rst index 9167e34b0ab..35266925dc6 100644 --- a/docs/source/ModulatoryProjections.rst +++ b/docs/source/ModulatoryProjections.rst @@ -5,3 +5,7 @@ ModulatoryProjections .. toctree:: :maxdepth: 1 + + LearningProjections + ControlProjections + GatingProjections \ No newline at end of file diff --git a/docs/source/PathwayProjections.rst b/docs/source/PathwayProjections.rst index 151a5197422..fd5f9adb6a5 100644 --- a/docs/source/PathwayProjections.rst +++ b/docs/source/PathwayProjections.rst @@ -6,4 +6,5 @@ PathwayProjections .. toctree:: :maxdepth: 1 + MappingProjection AutoAssociativeProjection \ No newline at end of file From f549b927b0b23dacaee6b7735cfaa055b7920835 Mon Sep 17 00:00:00 2001 From: jdcpni Date: Sat, 2 Sep 2017 10:00:22 -0400 Subject: [PATCH 20/69] =?UTF-8?q?=E2=80=A2=20Functions=20(#441)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - docstring revs: Overview and Structure --- PsyNeuLink/Components/Functions/Function.py | 75 ++++++++++++++------- 1 file changed, 52 insertions(+), 23 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index de15c79e933..fd3e9c8c1cb 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -59,22 +59,21 @@ for use by other Components. Every Component in PsyNeuLink is assigned a Function; when that Component is executed, its Function's `function ` is executed. 
The `function ` can be any callable operation, although most commonly it is a mathematical operation (and, for those, almost always uses a call to one or -more numpy functions). There are two reasons PsyNeuLink packages functions in a Function Component: to *manage -parameters*, and for *modularity*. - -**Manage parameters**. Parameters are attributes of a Function that either remain stable over multiple calls to the -function (e.g., the `gain ` or `bias ` of a `Logistic` function, or the learning rate -of a learning function); or, if they change, they do so less frequently or under the control of different factors -than the function's variable (i.e., its input). As a consequence, it is useful to manage these separately from the -function's variable, and not have to provide them every time the function is called. To address this, every -PsyNeuLink Function has a set of attributes corresponding to the parameters of the function, that can be specified at -the time the Function is created (in arguments to its constructor), and can be modified independently -of a call to its :keyword:`function`. Modifications can be directly (e.g., in a script), or by the operation of other -PsyNeuLink Components (e.g., `AdaptiveMechanisms`) by way of `ControlProjections `. - -**Modularity**. By providing a standard interface, any Function assigned to a Components in PsyNeuLink can be replaced -with other PsyNeuLink Functions, or with user-written custom functions so long as they adhere to certain standards -(the PsyNeuLink :ref:`Function API `). +more numpy functions). There are two reasons PsyNeuLink packages functions in a Function Component: + +* **Manage parameters** -- parameters are attributes of a Function that either remain stable over multiple calls to the + function (e.g., the `gain ` or `bias ` of a `Logistic` function, or the learning rate + of a learning function); or, if they change, they do so less frequently or under the control of different factors + than the function's variable (i.e., its input). As a consequence, it is useful to manage these separately from the + function's variable, and not have to provide them every time the function is called. To address this, every + PsyNeuLink Function has a set of attributes corresponding to the parameters of the function, that can be specified at + the time the Function is created (in arguments to its constructor), and can be modified independently + of a call to its :keyword:`function`. Modifications can be directly (e.g., in a script), or by the operation of other + PsyNeuLink Components (e.g., `AdaptiveMechanisms`) by way of `ControlProjections `. +.. +* **Modularity** -- by providing a standard interface, any Function assigned to a Components in PsyNeuLink can be + replaced with other PsyNeuLink Functions, or with user-written custom functions so long as they adhere to certain + standards (the PsyNeuLink :ref:`Function API `). .. _Function_Creation: @@ -94,18 +93,35 @@ Structure --------- -Every Function has a `variable ` that provides the input to its -`function ` method. Its core attribute is its `function ` attribute -that determines the computation that it carries out. Ths must be a callable object (that is, a python function or -method of some kind). Unlike other PsyNeuLink `Components `, it *cannot* be (another) Function object (it -can't be "turtles" all the way down!). A Function also has an attribute for each of the parameters of its `function -`. 
If a Function has been assigned to another Component, then it also has an `owner -` attribute that refers to that Component. The Function itself is assigned as the Component's +.. _Function_Core_Attributes: + +Core Attributes +~~~~~~~~~~~~~~~ + +Every Function has the following core attributes: + +* `variable ` -- provides the input to the Function's `function `. +.. +* `function ` -- determines the computation carried out by the Function; it must be a + callable object (that is, a python function or method of some kind). Unlike other PsyNeuLink `Components + `, it *cannot* be (another) Function object (it can't be "turtles" all the way down!). If the Function + has been assigned to another `Component`, then its `function ` is also assigned as the + the `function ` attribute of the Component to which it has been assigned (i.e., its + `owner `. + +A Function also has an attribute for each of the parameters of its `function `. + +Owner +~~~~~ + +If a Function has been assigned to another `Component`, then it also has an `owner ` attribute +that refers to that Component. The Function itself is assigned as the Component's `function_object ` attribute. Each of the Function's attributes is also assigned as an attribute of the `owner `, and those are each associated with with a `parameterState ` of the `owner `. Projections to those parameterStates can be used by `ControlProjections ` to modify the Function's parameters. + COMMENT: .. _Function_Output_Type_Conversion: @@ -121,6 +137,19 @@ (see `Linear` for an example). COMMENT +.. _Function_Modulatory_Params: + +Modulatory Parameters +~~~~~~~~~~~~~~~~~~~~~ + +Some classes of Functions also implement a pair of modulatory parameters: `multiplicative_param` and `additive_param`. +Each of these is assigned the name of one of the function's parameters. These are used by `ModulatoryProjections +` to modulate the output of the function. For example, they are used by `GatingProjections +` to modulate the `function ` (and thereby the `value `) of +an `InputState` or `OutputState`; and by the `ControlProjection(s) ` of an `LCMechanism` to +modulate the `function ` of a `TransferMechanism`. + + .. _Function_Execution: Execution From 5e87345d4212cf5760bb2948ac7d6c4ca2a4b26f Mon Sep 17 00:00:00 2001 From: jdcpni Date: Sun, 3 Sep 2017 22:18:29 -0400 Subject: [PATCH 21/69] Feat/mechanism/lc (#443) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * • LCMechanism * • index.rst - Changed main title to "Welcome..." * • LCMechanism - added • index.rst - Changed main title to "Welcome..." * • index.rst - Changed main title to "Welcome..." 
* - * - * - * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into feat/Mechanism/LC # Conflicts: # docs/source/AdaptiveMechanisms.rst * - * - * - * - * - * - * - * - * - * - * - --- PsyNeuLink/Components/Functions/Function.py | 15 +- .../ControlMechanism/ControlMechanism.py | 155 ++++- .../DefaultControlMechanism.py | 4 +- .../GatingMechanism/GatingMechanism.py | 61 +- .../States/ModulatorySignals/ControlSignal.py | 6 +- .../States/ModulatorySignals/GatingSignal.py | 19 +- PsyNeuLink/Components/States/State.py | 13 +- .../ControlMechanisms/EVC/EVCMechanism.py | 49 +- .../ControlMechanisms/LCMechanism.py | 655 ++++++++++++++++++ .../LCMechanism Test Script.py | 24 + docs/source/ControlMechanisms.rst | 1 + docs/source/EVCMechanism.rst | 3 +- docs/source/LCMechanism.rst | 6 + 13 files changed, 920 insertions(+), 91 deletions(-) create mode 100644 PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/LCMechanism.py create mode 100644 Scripts/DEBUGGING SCRIPTS/LCMechanism Test Script.py create mode 100644 docs/source/LCMechanism.rst diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index fd3e9c8c1cb..da63c904557 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -1720,14 +1720,15 @@ def scale(self, val): class TransferFunction(Function_Base): """Function that transforms variable but maintains its shape - All TransferFunctions must have the attribute `bounds` that specifies the lower and upper limits of the result; - if there are none, the attribute is set to `None`; if it has at least one bound, the attribute is set to a - tuple specifying the lower and upper bounds, respectively, with `None` as the entry for no bound. + All TransferFunctions must have the following attributes: - All TransferFunctions must also have two attributes - multiplicative_param and additive_param - - each of which is assigned the name of one of the function's parameters; - this is for use by ModulatoryProjections (and, in particular, GatingProjections, - when the TransferFunction is used as the function of an InputState or OutputState). + `bounds` -- specifies the lower and upper limits of the result; if there are none, the attribute is set to + `None`; if it has at least one bound, the attribute is set to a tuple specifying the lower and upper bounds, + respectively, with `None` as the entry for no bound. + + `multiplicative_param` and `additive_param` -- each of these is assigned the name of one of the function's + parameters and used by `ModulatoryProjections ` to modulate the output of the + TransferFunction's function (see `Function_Modulatory_Params`). """ componentType = TRANSFER_FUNCTION_TYPE diff --git a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py index 999e555c337..ec9553cd237 100644 --- a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py @@ -14,15 +14,18 @@ A ControlMechanism is an `AdaptiveMechanism ` that modifies the parameter(s) of one or more `Components `. Its `function ` takes an evaluative signal (usually the -output of an `ObjectiveMechanism`) and uses that to calculate an `allocation_policy -`: a list of `allocation ` values for each of its -`ControlSignals `. 
This is used by each ControlSignal to calculate its `intensity`, which is then -conveyed by the ControlSignal's `ControlProjection(s) ` to the `ParameterState(s) -` to which they project. Each ParameterState then uses the value received by a ControlProjection to -modify the value of the parameter for which it is responsible (see `ModulatorySignal_Modulation` for a more detailed -description of how modulation operates). A ControlMechanism can regulate only the parameters of Components in the -`System` for which it is the `controller `. The control Components of a System can be -displayed using the System's `System_Base.show_graph` method with its **show_control** argument assigned as `True`. +output of an `ObjectiveMechanism`, listed in its `monitoring_mechanism ` +attribute) and uses that to calculate an `allocation_policy `: a list of +`allocation ` values for each of its `ControlSignals `. This is used by +each ControlSignal to calculate its `intensity`, which is then conveyed by the ControlSignal's `ControlProjection(s) +` to the `ParameterState(s) ` to which they project. Each ParameterState then +uses the value received by a ControlProjection to modify the value of the parameter for which it is responsible (see +`ModulatorySignal_Modulation` for a more detailed description of how modulation operates). A ControlMechanism can +regulate only the parameters of Components in the `System` for which it is the `controller +`. The OutputStates used to determine the ControlMechanism's `allocation_policy +` and the parameter is controls can be listed using its `show +` method. The control Components of a System can be displayed using the System's +`System_Base.show_graph` method with its **show_control** argument assigned as `True`. COMMENT: TBI The control Components of a System can be displayed using the System's `show_graph ` method with its **show_control** argument assigned as `True`. @@ -58,17 +61,12 @@ When a ControlMechanism is created, it automatically creates an `ObjectiveMechanism` that is used to monitor and evaluate the values specified in the **monitor_for_control** argument of the ControlMechanism's constructor (or of the -System that created the ControlMechanism). The **monitor_for_control** argument must be a list, each item of which must -refer to a `Mechanism ` or the `OutputState` of one. These are assigned to the ObjectiveMechanism's -`monitored_values ` attribute (and the ControlMechanism's `monitored_output_states` -` attribute), and the ObjectiveMechanism is referenced by the -ControlMechanism's `monitoring_mechanism ` attribute. The ObjectiveMechanism -monitors each Mechanism and/or OutputState listed in its `monitored_values ` -attribute (and the ControlMechanism's `monitored_output_states` ` -attribute), and evaluates them using the its `function `. The result is assigned as the -`value ` of the ObjectiveMechanism's *ERROR_SIGNAL* `OutputState`, and (by way of a -`MappingProjection`) to the ControlMechanism's *ERROR_SIGNAL* `InputState`. This information is used by the -ControlMechanism to set the `allocation ` for each of the ControlMechanism's ControlSignals. +System that created the ControlMechanism). The ObjectiveMechanism is assigned to the ControlMechanism's +`monitoring_mechanism ` attribute, and the OutputStates specified in +the **monitor_for_control** argument are assigned to its `monitored_output_states +` attribute (as well as the ObjectiveMechanism's `monitored_values +` attribute). 
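
As a rough sketch of the evaluative pathway just described (plain Python, not the PsyNeuLink API): the
ObjectiveMechanism combines the values of the monitored OutputStates into a single evaluative signal, which reaches
the ControlMechanism's *ERROR_SIGNAL* InputState and is passed to its function. The weighted sum and the identity
mapping to a single allocation used below are assumptions made only for illustration::

    import numpy as np

    monitored_output_state_values = np.array([0.9, 0.4])   # values of the monitored OutputStates
    weights = np.array([1.0, -1.0])                        # assumed combination weights

    # ObjectiveMechanism: combine monitored values into an evaluative (error) signal
    error_signal = float(weights @ monitored_output_state_values)

    # ControlMechanism: map the error signal to an allocation_policy
    # (one allocation per ControlSignal; identity mapping assumed here)
    allocation_policy = [error_signal]
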
The **monitor_for_control** argument must be a list, each item of +which must refer to a `Mechanism ` or the `OutputState` of one. .. _ControlMechanism_Control_Signals: @@ -85,10 +83,74 @@ A `ControlSignal` is created for each item listed in the **control_signals** argument of its constructor, and all of the ControlSignals for a ControlMechanism are listed in its `control_signals ` attribute. Each ControlSignal is assigned a `ControlProjection` to the `ParameterState` associated with each parameter -it controls. ControlSignals are a type of `OutputState`, and so they are also listed in the ControlMechanism's -`output_states ` attribute. +it controls. +.. _ControlMechanism_Structure: + +Structure +--------- + +.. _ControlMechanism_Input: + +Input +~~~~~ + +A ControlMechanism has a single *ERROR_SIGNAL* `InputState`, the `value ` of which is used as the +input to the ControlMechanism's `function `, that determines the ControlMechanism's +`allocation_policy `. + +.. _ControlMechanism_Monitor_OutputStates: + +If the **monitor_for_control** argument of the ControlMechanism's constructor is specified, the following +Components are also automatically created and assigned to the ControlMechanism when it is created: + + * an `ObjectiveMechanism` -- this monitors the `value ` of each of the `OutputStates + ` specified in the **monitor_for_control** argument of the ControlMechanism's constructor. + The ObjectiveMechanism is assigned to the ControlMechanism's `monitoring_mechanism + ` attribute, and the OutputStates it monitors are listed in the + ControlMechanism's `monitored_output_states ` attribute + (as well as the ObjectiveMechanism's `monitored_values ` attribute). + The `monitored_output_states ` are evaluated by the + ObjectiveMechanism's `function `; the result is assigned as the `value + ` of the ObjectiveMechanism's *ERROR_SIGNAL* `OutputState ` + and (by way of a `MappingProjection` -- see below) to the ControlMechanism's *ERROR_SIGNAL* `InputState`. + This information is used by the ControlMechanism to set the `allocation ` for each of + the ControlMechanism's ControlSignals (see `ControlMechanism_Function`). + .. + * a `MappingProjection` that projects from the ObjectiveMechanism's *ERROR_SIGNAL* `OutputState + ` to the ControlMechanism's *ERROR_SIGNAL* `InputState`. + .. + * `MappingProjections ` from Mechanisms or OutputStates specified in + the **monitor_for_control** argument of the ControlMechanism's constructor to the ObjectiveMechanism's + `primary InputState `. + +The OutputStates monitored by the ControlMechanism's `monitoring_mechanism ` +can be displayed using its :func:`show ` method. + +.. _ControlMechanism_Function: + +Function +~~~~~~~~ + +A ControlMechanism's `function ` uses the `value ` of its +*ERROR_SIGNAL* `InputState` to generate an `allocation_policy `. Each item +of the `allocation_policy ` is assigned as the `value +` of a corresponding `ControlSignal` in `control_signals `. + +.. _ControlMechanism_Output: + +Output +~~~~~~ + +A ControlMechanism has a `ControlSignal` for each parameter specified in its `control_signals +` attribute, that sends a `ControlProjection` to the `ParameterState` for the +corresponding parameter. The `value ` of each ControlSignal is assigned the value of the +corresponding item in the ControlMechanism's `allocation_policy ` attribute. +ControlSignals are a type of `OutputState`, and so they are also listed in the ControlMechanism's `output_states +` attribute. 
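
The correspondence between the ``allocation_policy`` and the parameters it ultimately affects can be sketched as
follows (plain Python, not the PsyNeuLink API). The one-to-one ordering of allocations and ControlSignals follows
the description above; treating each ControlSignal's intensity as equal to its allocation, applying it
multiplicatively (the ControlMechanism's default form of modulation), and the particular parameter names are
simplifying assumptions made only for illustration::

    allocation_policy = [0.5, 2.0]            # one allocation per ControlSignal

    controlled_parameters = {                 # base values of the controlled parameters
        'drift_rate': 1.0,                    # (names are illustrative only)
        'threshold': 10.0,
    }

    modulated = {
        name: base_value * allocation         # MULTIPLICATIVE modulation of the parameter
        for (name, base_value), allocation
        in zip(controlled_parameters.items(), allocation_policy)
    }
    # modulated -> {'drift_rate': 0.5, 'threshold': 20.0}
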
The parameters modulated by an ControlMechanism's ControlSignals can be +displayed using its :func:`show ` method. + COMMENT: .. _ControlMechanism_Examples: @@ -149,6 +211,9 @@ from PsyNeuLink.Globals.Utilities import ContentAddressableList from PsyNeuLink.Scheduling.TimeScale import CentralClock, TimeScale +MONITORING_MECHANISM = 'monitoring_mechanism' +ALLOCATION_POLICY = 'allocation_policy' + ControlMechanismRegistry = {} class ControlMechanismError(Exception): @@ -161,9 +226,9 @@ class ControlMechanism_Base(AdaptiveMechanism_Base): """ ControlMechanism_Base( \ monitor_for_control=None, \ + function=Linear, \ control_signals=None, \ modulation=ModulationParam.MULTIPLICATIVE \ - function=Linear, \ params=None, \ name=None, \ prefs=None) @@ -213,6 +278,9 @@ class ControlMechanism_Base(AdaptiveMechanism_Base): specifies set of OutputStates to monitor (see :ref:`ControlMechanism_Monitored_OutputStates` for specification options). + function : TransferFunction : default Linear(slope=1, intercept=0) + specifies function used to combine values of monitored OutputStates. + control_signals : List[parameter of Mechanism or its function, \ ParameterState, Mechanism tuple[str, Mechanism] or dict] specifies the parameters to be controlled by the ControlMechanism @@ -222,9 +290,6 @@ class ControlMechanism_Base(AdaptiveMechanism_Base): specifies the default form of modulation used by the ControlMechanism's `ControlSignals `, unless they are `individually specified `. - function : TransferFunction : default Linear(slope=1, intercept=0) - specifies function used to combine values of monitored OutputStates. - params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that can be used to specify the parameters for the Mechanism, parameters for its function, and/or a custom function and its parameters. Values @@ -254,14 +319,6 @@ class ControlMechanism_Base(AdaptiveMechanism_Base): the ControlMechanism's constructor, the `value ` \\s of which serve as the items of the ControlMechanism's `variable `. - control_signals : List[ControlSignal] - list of `ControlSignals ` for the ControlMechanism, each of which sends a `ControlProjection` - to the `ParameterState` for the parameter it controls (same as ControlMechanism's - `output_states ` attribute). - - control_projections : List[ControlProjection] - list of `ControlProjections `, one for each `ControlSignal` in `control_signals`. - function : TransferFunction : default Linear(slope=1, intercept=0) determines how the `value ` \\s of the `OutputStates ` specified in the **monitor_for_control** argument of the ControlMechanism's constructor are used to generate its @@ -272,6 +329,14 @@ class ControlMechanism_Base(AdaptiveMechanism_Base): ControlSignal listed in the `control_signals` attribute; the allocation_policy is the same as the ControlMechanism's `value ` attribute). + control_signals : List[ControlSignal] + list of `ControlSignals ` for the ControlMechanism, each of which sends a `ControlProjection` + to the `ParameterState` for the parameter it controls (same as ControlMechanism's + `output_states ` attribute). + + control_projections : List[ControlProjection] + list of `ControlProjections `, one for each `ControlSignal` in `control_signals`. + modulation : ModulationParam the default form of modulation used by the ControlMechanism's `ControlSignals `, unless they are `individually specified `. 
@@ -295,7 +360,10 @@ class ClassDefaults(AdaptiveMechanism_Base.ClassDefaults): from PsyNeuLink.Components.Functions.Function import Linear paramClassDefaults = Mechanism_Base.paramClassDefaults.copy() - paramClassDefaults.update({CONTROL_PROJECTIONS: None}) + paramClassDefaults.update({ + MONITORING_MECHANISM: None, + ALLOCATION_POLICY: None, + CONTROL_PROJECTIONS: None}) @tc.typecheck def __init__(self, @@ -350,7 +418,7 @@ def _validate_params(self, request_set, target_set=None, context=None): else: self.paramClassDefaults[SYSTEM] = request_set[SYSTEM] - if MONITOR_FOR_CONTROL in target_set: + if MONITOR_FOR_CONTROL in target_set and target_set[MONITOR_FOR_CONTROL] is not None: for spec in target_set[MONITOR_FOR_CONTROL]: if isinstance(spec, MonitoredOutputStatesOption): continue @@ -375,6 +443,7 @@ def _validate_params(self, request_set, target_set=None, context=None): # FIX: REPLACE WITH CALL TO _parse_state_spec WITH APPROPRIATE PARAMETERS if CONTROL_SIGNALS in target_set and target_set[CONTROL_SIGNALS]: + # MODIFIED 9/3/17 OLD: from PsyNeuLink.Components.States.ModulatorySignals.ControlSignal import ControlSignal for spec in target_set[CONTROL_SIGNALS]: @@ -484,6 +553,16 @@ def _validate_params(self, request_set, target_set=None, context=None): mech.name, self.system.name)) + # # MODIFIED 9/3/17 NEW: + # if not isinstance(target_set[CONTROL_SIGNALS], list): + # raise ControlMechanismError("{} arg of {} must be list". + # format(CONTROL_SIGNAL, self.name)) + # + # for spec in target_set[CONTROL_SIGNALS]: + # _parse_state_spec(self, spec) + # + # # MODIFIED 9/3/17 END: + def _instantiate_monitored_output_states(self, context=None): raise ControlMechanismError("{0} (subclass of {1}) must implement _instantiate_monitored_output_states". format(self.__class__.__name__, @@ -834,6 +913,10 @@ def _execute(self, raise ControlMechanismError("{0} must implement execute() method".format(self.__class__.__name__)) def show(self): + """Display the OutputStates monitored by ControlMechanism's `monitoring_mechanism + ` and the parameters modulated by its `control_signals + `. 
+ """ print ("\n---------------------------------------------------------") diff --git a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/DefaultControlMechanism.py b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/DefaultControlMechanism.py index 526277ec59c..726d47e7e6d 100644 --- a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/DefaultControlMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/DefaultControlMechanism.py @@ -134,8 +134,8 @@ def _execute(self, def _instantiate_input_states(self, context=None): """Instantiate input_value attribute - Instantiate input_states attribute (in case they are referenced) and - assign any OutputStates that project to them to monitored_output_states + Instantiate input_states and monitored_output_states attributes (in case they are referenced) + and assign any OutputStates that project to the input_states to monitored_output_states IMPLEMENTATION NOTE: At present, these are dummy assignments, simply to satisfy the requirements for subclasses of ControlMechanism; in the future, an _instantiate_monitoring_mechanism() diff --git a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/GatingMechanism/GatingMechanism.py b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/GatingMechanism/GatingMechanism.py index 54219351825..b38bf863cdf 100644 --- a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/GatingMechanism/GatingMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/GatingMechanism/GatingMechanism.py @@ -19,11 +19,14 @@ one for each of its `GatingSignals `. Each of those, in turn, generates a `gating_signal ` used by its `GatingProjections ` to modulate the value of the State( s) to which they project. A GatingMechanism can regulate only the parameters of Mechanisms in the `System` to which -it belongs. +it belongs. The InputStates and/or OutputStates gated by a GatingMechanism can be list using its `show +` method. + COMMENT: TBI The gating components of a System can be displayed using the System's `show_graph` method with its **show_gating** argument assigned as :keyword:``True`. COMMENT + The gating components of a System are executed after all `ProcessingMechanisms `, `LearningMechanism `, and `ControlMechanism ` in that System have been executed. @@ -60,8 +63,7 @@ A `GatingSignal` is created for each item listed in the **gating_signals** argument of the constructor, and all of the GatingSignals for a GatingMechanism are listed in its `gating_signals ` attribute. Each GatingSignal is assigned one or more `GatingProjections ` to the InputState(s) and/or -OutputState(s) it gates. GatingSignals are a type of `OutputState`, and so they are also listed in the -GatingMechanism's `output_states ` attribute. +OutputState(s) it gates. .. _GatingMechanism_Modulation: @@ -74,6 +76,42 @@ modulate the value of a State). Each GatingSignal uses this value, unless its value is `individually specified `. +.. _GatingMechanism_Structure: + +Structure +--------- + +.. _GatingMechanism_Input: + +Input +~~~~~ + +By default, a GatingMechanism has a single `InputState`, the `value ` of which is used +as the input to the GatingMechanism's `function `. + +.. _GatingMechanism_Function: + +Function +~~~~~~~~ + +A GatingMechanism's `function ` uses the `value ` of its +`primary InputState ` to generate an `gating_policy `, +each item of which is assigned as the `value ` of a corresponding `GatingSignal` in its +`gating_signals ` attribute. + +.. 
_GatingMechanism_Output: + +Output +~~~~~~ + +A GatingMechanism has a `GatingSignal` for each `InputState` and/or `OutputState` specified in its `gating_signals +` attribute, to which it sends a `GatingProjection`. The `value ` +of each GatingSignal is assigned the value of the corresponding item in the GatingMechanism's `gating_policy +` attribute. GatingSignals are a type of `OutputState`, and so they are also listed +in the GatingMechanism's `output_states ` attribute. The InputStates and/or OutputStates +modulated by a GatingMechanism's GatingSignals can be displayed using its :func:`show ` method. + + .. _GatingMechanism_Execution: Execution @@ -181,8 +219,7 @@ class GatingMechanism(AdaptiveMechanism_Base): function : TransferFunction : default Linear(slope=1, intercept=0) specifies the function used to transform the GatingMechanism's `variable ` - to a `gating_policy`; the default is an identity function that simply assigns - `variable ` as the `gating_policy `. + to a `gating_policy`. gating_signals : List[GatingSignal, InputState, OutputState, Mechanism, tuple[str, Mechanism], or dict] specifies the `InputStates ` and/or `OutputStates ` @@ -200,7 +237,7 @@ class GatingMechanism(AdaptiveMechanism_Base): specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. - name : str : default ControlMechanism- + name : str : default GatingMechanism- a string used for the name of the Mechanism. If not is specified, a default is assigned by `MechanismRegistry` (see :doc:`Registry ` for conventions used in naming, including for default and duplicate names). @@ -220,6 +257,11 @@ class GatingMechanism(AdaptiveMechanism_Base): and is the same format as its `gating_policy ` (unless a custom `function ` has been assigned). + function : TransferFunction + determines the function used to transform the GatingMechanism's `variable ` + to a `gating_policy`; the default is an identity function that simply assigns + `variable ` as the `gating_policy `. + gating_signals : List[GatingSignal] list of `GatingSignals ` for the GatingMechanism, each of which sends `GatingProjection(s) ` to the `InputState(s) ` and/or `OutputStates ` @@ -398,7 +440,7 @@ def _instantiate_gating_signal(self, gating_signal=None, context=None): # Deferred Initialization, so assign owner, name, and initialize if gating_signal.init_status is InitStatus.DEFERRED_INITIALIZATION: # FIX 5/23/17: IMPLEMENT DEFERRED_INITIALIZATION FOR GatingSignal - # CALL DEFERRED INIT WITH SELF AS OWNER ??AND NAME FROM gating_signal_dict?? (OR WAS IT SPECIFIED) + # CALL DEFERRED INIT WITH SELF AS OWNER ??A˝ND NAME FROM gating_signal_dict?? (OR WAS IT SPECIFIED) # OR ASSIGN NAME IF IT IS DEFAULT, USING GATING_SIGNAL_DICT?? # # MODIFIED 7/7/17 OLD: # pass @@ -580,13 +622,16 @@ def _execute(self, # return gating_policy def show(self): + """Display the InputStates and/or OutputStates gated by the GatingMechanism's `gating_signals + `. 
+ """ print ("\n---------------------------------------------------------") print ("\n{0}".format(self.name)) print ("\n\tGating the following Mechanism InputStates and/or OutputStates:".format(self.name)) # Sort for consistency of output: - state_names_sorted = sorted(self.output_states.keys()) + state_names_sorted = sorted(self.output_states) for state_name in state_names_sorted: for projection in self.output_states[state_name].efferents: print ("\t\t{0}: {1}".format(projection.receiver.owner.name, projection.receiver.name)) diff --git a/PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py b/PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py index 65f5d696791..af4eca9da68 100644 --- a/PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py +++ b/PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py @@ -250,7 +250,9 @@ name='My EVC Mechanism') COMMENT -*Modulate the parameters of several Mechanisms in a System*. This shows:: +*Modulate the parameters of several Mechanisms in a System*. The following example assigns ControlSignals to modulate +the `gain ` parameter of the `Logistic` function for ``My_Mech_A`` and the `intercept +` parameter of the `Linear` function for ``My_Mech_B``:: My_Mech_A = TransferMechanism(function=Logistic) My_Mech_B = TransferMechanism(function=Linear, @@ -264,7 +266,7 @@ control_signals=[(GAIN, My_Mech_A), {NAME: INTERCEPT, MECHANISM: My_Mech_B, - MODULATION:ModulationParam.ADDITIVE}], + MODULATION: ModulationParam.ADDITIVE}], name='My Test System') diff --git a/PsyNeuLink/Components/States/ModulatorySignals/GatingSignal.py b/PsyNeuLink/Components/States/ModulatorySignals/GatingSignal.py index 810645eacdc..276cee518aa 100644 --- a/PsyNeuLink/Components/States/ModulatorySignals/GatingSignal.py +++ b/PsyNeuLink/Components/States/ModulatorySignals/GatingSignal.py @@ -162,28 +162,27 @@ **Modulate the InputStates of several Mechanisms**. In next example, a `GatingMechanism` is created that modulates the `InputState` of all the layers in a 3-layered feedforward neural network. Ordinarily, gating modulates the *MULTIPLICATIVE_PARAM* of an InputState's `function `. In the example, this is changed so that -it adds the `value ` of the `GatingSignal` to the `value ` of each InputState:: +it *adds* the `value ` of the `GatingSignal` to the `value ` of each InputState:: my_input_layer = TransferMechanism(size=3) my_hidden_layer = TransferMechanism(size=5) my_output_layer = TransferMechanism(size=2) - my_gating_mechanism = GatingMechanism(gating_signals=[ - {'GATE_ALL': [my_input_layer, - my_hidden_layer, - my_output_layer]}, - modulation=ModulationParam.ADDITIVE) + my_gating_mechanism = GatingMechanism(gating_signals=[{'GATE_ALL': [my_input_layer, + my_hidden_layer, + my_output_layer]}], + modulation=ModulationParam.ADDITIVE) Note that, again, the **gating_signals** are listed as Mechanisms, since in this case it is their primary InputStates that are to be gated. Since they are all listed in a single entry of a -`specification dictionary <_GatingSignal_Specification>`, they will all be gated by a single GatingSignal named +`specification dictionary `, they will all be gated by a single GatingSignal named ``GATE_ALL``, that will send a `GatingProjection` to the InputState of each of the Mechanisms listed (the next example shows how different InputStates can be differentially gated by a `GatingMechanism`). 
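
As a quick check on a specification like the one above (a usage sketch based on the ``show`` method added elsewhere
in this patch; the exact format of the report may differ), the States gated by the single ``GATE_ALL`` GatingSignal
can be listed with::

    my_gating_mechanism.show()

which should report the primary InputState of each of the three layer Mechanisms.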
Finally, note that the -`ModulationParam` specified for the `GatingMechanism` (and therefore the default for its GatingSignals, pertains to +`ModulationParam` specified for the `GatingMechanism` (and therefore the default for its GatingSignals) pertains to the `function ` of each `InputState`. By default that is a `Linear` function, the *ADDITIVE_PARAM* of which is its `intercept ` parameter. Therefore, in the example above, each time the InputStates are updated, the value of the GatingSignal will be assigned as the `intercept` of each InputState's `function `, thus adding that amount to the input to the State before determining its -`value `. +`value `. **Gate InputStates differentially**. In the example above, the InputStates for all of the Mechanisms were gated using a single GatingSignal. In the example below, a different GatingSignal is assigned to the InputState of each @@ -465,7 +464,7 @@ def _parse_gating_signal_spec(owner, state_spec): - {NAME:str, MECHANISM:Mechanism} dict where: str is the name of an InputState or OutputState of the Mechanism, - Mechanism is a reference to an existing that belongs to self.system + Mechanism is a reference to an existing Mechanism that belongs to self.system Checks for duplicate state specifications within state_spec or with any existing GatingSignal of the owner (i.e., states that will receive more than one GatingProjection from the owner) diff --git a/PsyNeuLink/Components/States/State.py b/PsyNeuLink/Components/States/State.py index b4eaf3ac8aa..37ebdbc1e88 100644 --- a/PsyNeuLink/Components/States/State.py +++ b/PsyNeuLink/Components/States/State.py @@ -1969,12 +1969,21 @@ def _instantiate_state_list(owner, # Note: still need to get indexed element of constraint_value, # since it was passed in as a 2D array (one for each State) else: + # # MODIFIED 9/3/17 OLD: + # # If only one State, don't add index suffix + # if num_states == 1: + # state_name = 'Default_' + state_param_identifier[:-1] + # # Add incremented index suffix for each State name + # else: + # state_name = 'Default_' + state_param_identifier[:-1] + "-" + str(index+1) + # MODIFIED 9/3/17 NEW: # If only one State, don't add index suffix if num_states == 1: - state_name = 'Default_' + state_param_identifier[:-1] + state_name = 'Default_' + state_param_identifier # Add incremented index suffix for each State name else: - state_name = 'Default_' + state_param_identifier[:-1] + "-" + str(index+1) + state_name = 'Default_' + state_param_identifier + "-" + str(index+1) + # MODIFIED 9/3/17 END # If it is an "exposed" number, make it a 1d np.array if isinstance(state_spec, numbers.Number): state_spec = np.atleast_1d(state_spec) diff --git a/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py b/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py index 964e871f57d..ffe1427af6f 100644 --- a/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py +++ b/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py @@ -27,7 +27,9 @@ combination of ControlSignal `intensity` values is called an `allocation_policy`. When a `System` is executed that uses an EVCMechanism as its `controller `, it concludes by executing the EVCMechanism, which determines its `allocation_policy` for the next `TRIAL`. That, in turn, determines the `intensity` for each of the -ControlSignals, and therefore the values of the parameters they control on the next `TRIAL`. 
+ControlSignals, and therefore the values of the parameters they control on the next `TRIAL`. The OutputStates used to +determine an EVCMechanism's `allocation_policy ` and the parameters it controls can +be listed using its `show ` method. .. _EVCMechanism_EVC: @@ -307,7 +309,8 @@ from PsyNeuLink.Components.Component import function_type from PsyNeuLink.Components.Functions.Function import ModulationParam, _is_modulation_param -from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.ControlMechanism.ControlMechanism import ControlMechanism_Base +from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.ControlMechanism.ControlMechanism \ + import ControlMechanism_Base, MONITORING_MECHANISM, ALLOCATION_POLICY from PsyNeuLink.Components.Mechanisms.Mechanism import MechanismList from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms import IntegratorMechanism from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism import \ @@ -332,9 +335,6 @@ # ------------------------------------------- KEY WORDS ------------------------------------------------------- -MONITORING_MECHANISM = 'monitoring_mechanism' -ALLOCATION_POLICY = 'allocation_policy' - # ControlSignal Costs INTENSITY_COST = 'INTENSITY COST' ADJUSTMENT_COST = 'ADJUSTMENT COST' @@ -374,13 +374,13 @@ class EVCMechanism(ControlMechanism_Base): prediction_mechanism_type=IntegratorMechanism, \ prediction_mechanism_params=None, \ monitor_for_control=None, \ - control_signals=None, \ function=ControlSignalGridSearch \ value_function=ValueFunction, \ outcome_function=LinearCombination(operation=PRODUCT), \ cost_function=LinearCombination(operation=SUM), \ combine_outcome_and_cost_function=LinearCombination(operation=SUM) \ save_all_values_and_policies:bool=:keyword:`False`, \ + control_signals=None, \ params=None, \ name=None, \ prefs=None) @@ -456,10 +456,6 @@ class EVCMechanism(ControlMechanism_Base): specifies set of `OutputStates ` to monitor (see `ControlMechanism_Monitored_OutputStates` for specification options). - control_signals : List[Attribute of Mechanism or its function, ParameterState, or tuple[str, Mechanism] - specifies the parameters to be controlled by the EVCMechanism - (see `control_signals ` for details). - function : function or method : ControlSignalGridSearch specifies the function used to determine the `allocation_policy` for the next execution of the EVCMechanism's `system ` (see `function ` for details). @@ -483,6 +479,10 @@ class EVCMechanism(ControlMechanism_Base): save_all_values_and_policies : bool : default False specifes whether to save every `allocation_policy` tested in `EVC_policies` and their values in `EVC_values`. + control_signals : List[Attribute of Mechanism or its function, ParameterState, or tuple[str, Mechanism] + specifies the parameters to be controlled by the EVCMechanism + (see `control_signals ` for details). + params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that can be used to specify the parameters for the Mechanism, its `function `, and/or a custom function and its parameters. Values @@ -509,10 +509,6 @@ class EVCMechanism(ControlMechanism_Base): system : System the `System` for which EVCMechanism is the `controller `. - control_signals : ContentAddressableList[ControlSignal] - list of the EVCMechanism's `ControlSignals `, including any that it inherited - from its `system `. 
- prediction_mechanisms : List[ProcessingMechanism] list of `predictions mechanisms ` generated for the EVCMechanism's `system ` when the EVCMechanism is created, one for each `ORIGIN` Mechanism in the System. @@ -600,12 +596,6 @@ class EVCMechanism(ControlMechanism_Base): controller.combine_outcome_and_cost_function - combines outcomes and costs COMMENT - allocation_policy : 2d np.array : defaultControlAllocation - determines the value assigned as the `variable ` for each `ControlSignal` and its - associated `ControlProjection`. Each item of the array must be a 1d array (usually containing a scalar) - that specifies an `allocation` for the corresponding ControlSignal, and the number of items must equal the - number of ControlSignals in the EVCMechanism's `control_signals` attribute. - value_function : function : default ValueFunction calculates the `EVC ` for a given `allocation_policy`. It takes as its arguments an `EVCMechanism`, an **outcome** value and a list or ndarray of **costs**, uses these to calculate an EVC, @@ -700,6 +690,16 @@ class EVCMechanism(ControlMechanism_Base): EVC_values : 1d np.array array of `EVC ` values, each of which corresponds to an `allocation_policy` in `EVC_policies`; + allocation_policy : 2d np.array : defaultControlAllocation + determines the value assigned as the `variable ` for each `ControlSignal` and its + associated `ControlProjection`. Each item of the array must be a 1d array (usually containing a scalar) + that specifies an `allocation` for the corresponding ControlSignal, and the number of items must equal the + number of ControlSignals in the EVCMechanism's `control_signals` attribute. + + control_signals : ContentAddressableList[ControlSignal] + list of the EVCMechanism's `ControlSignals `, including any that it inherited + from its `system `. + """ componentType = EVC_MECHANISM @@ -722,7 +722,6 @@ class ClassDefaults(ControlMechanism_Base.ClassDefaults): # from Components.__init__ import DefaultSystem paramClassDefaults = ControlMechanism_Base.paramClassDefaults.copy() paramClassDefaults.update({MAKE_DEFAULT_CONTROLLER: True, - ALLOCATION_POLICY: None, PARAMETER_STATES: False}) @tc.typecheck @@ -1507,9 +1506,13 @@ def outcome_function(self, value): else: self._outcome_function = value - # MODIFIED 7/27/17 NEW: + # # MODIFIED 7/27/17 NEW: + # # Assign outcome_function to monitoring_mechanism + # if hasattr(self, MONITORING_MECHANISM): + # self.monitoring_mechanism.assign_params({FUNCTION:self.outcome_function}) + # MODIFIED 9/3/17 NEWER: # Assign outcome_function to monitoring_mechanism - if hasattr(self, MONITORING_MECHANISM): + if self.monitoring_mechanism is not None: self.monitoring_mechanism.assign_params({FUNCTION:self.outcome_function}) # MODIFIED 7/27/17 END diff --git a/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/LCMechanism.py b/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/LCMechanism.py new file mode 100644 index 00000000000..a27c9f45ae0 --- /dev/null +++ b/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/LCMechanism.py @@ -0,0 +1,655 @@ +# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
You may obtain a copy of the License at: +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. + + +# ************************************** LCMechanism ************************************************ + +""" + +.. note:: + **THIS MECHANISM IS ONLY PARTIALLY IMPLEMENTED.** + + IT CAN MODULATE MECHANISMS, BUT: + + - IT DOES NOT YET AUTOMATICALLY GENERATE A `UtilityIntegrator` AS ITS OBJECTIVE MECHANISM + .. + - THE `FitzHughNagumoIntegration` FUNCTION AND ASSOCIATED `mode` PARAMETER HAVE NOT YET BEEN IMPLEMENTED + + +Overview +-------- + +An LCMechanism is a `ControlMechanism ` that multiplicatively modulates the `function +` of one or more `Mechanisms ` (usually `TransferMechanisms `). +It implements an abstract model of the `locus coeruleus (LC) `_ that, +together with a `UtilityIntegrator` Mechanism, implement a form of the `Adaptive Gain Theory +`_ of the locus coeruleus-norepinephrine +(LC-NE) system. The LCMechanism uses a `FitzHughNagumoIntegration` Function to generate its output, under the +influence of a `mode ` parameter that regulates its operation between "tonic" to "phasic" modes of +responding -- see `Gilzenrat et al., <2002https://www.ncbi.nlm.nih.gov/pubmed/12371518>`_). The Mechanisms modulated +by an LCMechanism can be listed using its `show ` method. + +.. _LCMechanism_Creation: + +Creating an LCMechanism +--------------------------- + +An LCMechanism can be created in any of the ways used to `create Mechanisms `. Like any Mechanism, +its **input_states** argument can be used to `specify Mechanisms (and/or their OutputState(s) +` to project to the LCMechanism (i.e., to drive its response). The `Mechanisms +` it controls are specified in the **modulated_mechanisms** argument of its constructor +(see `LCMechanism_modulate`). +COMMENT: +In addition, one or more Mechanisms can be specified to govern the LCMechanism's +`mode ` parameter, by specifying them in the **monitor_for_control** argument of its constructor +(see `LCMechanism_Monitored_OutputStates`). +COMMENT + +.. _LCMechanism_Modulate: + +Specifying Mechanisms to Modulate +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The Mechanisms to be modulated by a LCMechanism are specified in its **modulated_mechanisms** argument. An LCMechanism +controls a `Mechanism` by modifying the `multiplicative parameter ` of the Mechanism's +`function `. Therefore, any Mechanism specified for control by an LCMechanism must be +either a `TransferMechanism`, or a Mechanism that uses a `TransferFunction` or a class of `Function ` that +implements a `multiplicative parameter `. The **controls_signals** argument must be a list +of such Mechanisms. The keyword *ALL* can also be used to specify all of the eligible `ProcessMechanisms + in all of the `Compositions ` to which the LCMechanism belongs. If a Mechanism +specified in the **modulated_mechanisms** argument does not implement a multiplicative parameter, it is ignored. A +`ControlProjection` is automatically created that projects from the LCMechanism to the `ParameterState` for the +`multiplicative parameter ` of every Mechanism specified in the **modulated_mechanisms** +argument (and listed in its `modulated_mechanisms ` attribute). + + +COMMENT: +.. 
_LCMechanism_Monitored_OutputStates: + +Specifying Values to Monitor for Control +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If the **monitor_for_control** argument is specified in the LCMechanism's constructor, it automatically creates a +`UtilityIntegratorMechanism` that is used to monitor and evaluate the `value ` of the `OutputStates +` specified. The **monitor_for_control** argument must be a list, each item of which must refer to a +`Mechanism ` or the `OutputState` of one. These are assigned to the UtilityIntegratorMechanism's +`monitored_values ` attribute (and the LCMechanism's `monitored_output_states +` attribute). The UtilityIntegratorMechanism itself is assigned to the +LCMechanism's `monitoring_mechanism ` attribute). +COMMENT + +.. _LCMechanism_Structure: + +Structure +--------- + +.. _LCMechanism_Input: + +Input +~~~~~ + +An LCMechanism has a single (primary) `InputState ` that receives projections from any Mechanisms +specified in the **input_states** argument of the LCMechanism's constructor; its `value ` is used as +the input to the LCMechanism's `function `. + +.. _LCMechanism_Function: + +Function +~~~~~~~~ + +An LCMechanism uses the `FitzHughNagumoIntegrator` as its Function. This takes the input the LCMechanism as its +`variable `, and uses the LCMechanism's `mode ` attribute as its +XXX parameter. Its result is assigned as the `value ` of the LCMechanism's `ControlSignal`, which +is used to modulate the Mechanisms specified in its `modulated_mechanisms ` attribute. + +COMMENT: +If the **monitor_for_control** argument of the LCMechanism's constructor is specified, the following +Components are also automatically created and assigned to the LCMechanism when it is created: + +XXX ASSIGN CONTROLLER: USES THE monitored_values ATTRIBUTE OF ITS CONTROLLER, + AS WELL AS ANY SPECIFIED IN monitor_for_control +XXX ASSIGN monitor_for_control: THESE ARE ADDED TO ITS CONTROLLER'S monitored_values LIST; + IF NO CONTROLLER IS SPECIFIED, ONE IS CREATED + +* a `UtilityIntegratorMechanism` -- this monitors the `value ` of each of the `OutputStates + ` specified in the **monitor_for_control** argument of the LCMechanism's constructor; these are + listed in the LCMechanism's `monitored_output_states ` attribute, and the + `monitored_values ` attribute of the UtilityIntegratorMechanism. They + are evaluated by the UtilityIntegratorMechanism's `function `; the result is + assigned as the `value ` of the UtilityIntegratorMechanism's *UTILITY_SIGNAL* `OutputState + ` and (by way of a `MappingProjection` -- see below) to the LCMechanism's *MODE* + `InputState`. This information is used by the LCMechanism to set the `value ` for its + `ControlSignal`. +.. +* a `MappingProjection` that projects from the UtilityIntegratorMechanism's *UTILITY_SIGNAL* `OutputState + ` to the LCMechanism's *MODE* `. +.. +* `MappingProjections ` from Mechanisms or OutputStates specified in **monitor_for_control** to + the UtilityIntegratorMechanism's `primary InputState `. +COMMENT + +.. _LCMechanism_Output: + +Output +~~~~~~ + +COMMENT: +VERSION FOR SINGLE ControlSignal +An LCMechanism has a single `ControlSignal` used to modulate the function of the Mechanism(s) listed in its +`modulated_mechanisms ` attribute. The ControlSignal is assigned a +`ControlProjection` to the `ParameterState` for the `multiplicative parameter ` of the +`function ` for each of those Mechanisms. +COMMENT + +An LCMechanism has a `ControlSignal` for each Mechanism listed in its `modulated_mechanisms +` attribute. 
All of its ControlSignals are assigned the same value: the result of +the LCMechanism's `function `. Each ControlSignal is assigned a `ControlProjection` to the +`ParameterState` for the `multiplicative parameter ` of `function +` for the Mechanism in `modulated_mechanisms ` to which it +corresponds. + +The Mechanisms modulated by an LCMechanism can be displayed using its :func:`show ` method. + +.. _LCMechanism_Examples: + +Examples +~~~~~~~~ + +The following example generates an LCMechanism that modulates the function of two TransferMechanisms, one that uses +a `Linear` function and the other a `Logistic` function:: + + my_mech_1 = TransferMechanism(function=Linear, + name='my_linear_mechanism') + my_mech_2 = TransferMechanism(function=Logistic, + name='my_logistic_mechanism') + + LC = LCMechanism(modulated_mechanisms=[my_mech_1, my_mech_2], + name='my_LC') + +Calling `my_LC.show()` generates the following report:: + + my_LC +COMMENT: + Monitoring the following Mechanism OutputStates: + None +COMMENT + + Modulating the following Mechanism parameters: + my_logistic_mechanism: gain + my_linear_mechanism: slope + +Note that the LCMechanism controls the `multiplicative parameter ` of the `function +` of each Mechanism: the `gain ` parameter for ``my_mech_1``, since it uses +a `Logistic` Function; and the `slope ` parameter for ``my_mech_2``, since it uses a `Linear` Function. + +COMMENT: + +ADDITIONAL EXAMPLES HERE OF THE DIFFERENT FORMS OF SPECIFICATION FOR +**monitor_for_control** and **modulated_mechanisms** + +STRUCTURE: +MODE INPUT_STATE <- NAMED ONE, LAST? +SIGNAL INPUT_STATE(S) <- PRIMARY; MUST BE FROM PROCESSING MECHANISMS +CONTROL SIGNALS + +COMMENT + +.. _LCMechanism_Execution: + +Execution +--------- + +Like other `ControlMechanisms `, an LCMechanism executes after all of the `ProcessingMechanisms +` in the `Composition` to which it belongs have `executed ` in a `TRIAL`. +It's `function ` takes the `value ` of the LCMechanism's `primary InputState +` as its input, and generates a response -- under the influence of its `mode ` +parameter -- that is assigned as the `value ` of its `ControlSignal`. The latter is used by its +`ControlProjections ` to modulate the response -- in the next `TRIAL` of execution -- of the +Mechanisms to which the LCMechanism projects + +.. note:: + The `ParameterState` that receives a `ControlProjection` does not update its value until its owner Mechanism + executes (see `Lazy Evaluation ` for an explanation of "lazy" updating). This means that even if a + LCMechanism has executed, the `multiplicative parameter ` parameter of the `function + ` of a Mechanism that it controls will not assume its new value until that Mechanism has + executed. + +.. 
_LCMechanism_Class_Reference: + +Class Reference +--------------- + +""" +import typecheck as tc + +from PsyNeuLink.Components.Functions.Function import ModulationParam, _is_modulation_param, MULTIPLICATIVE_PARAM +from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.AdaptiveMechanism import AdaptiveMechanism_Base +from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.ControlMechanism.ControlMechanism \ + import ControlMechanism_Base, ALLOCATION_POLICY +from PsyNeuLink.Components.Projections.ModulatoryProjections.ControlProjection import ControlProjection +from PsyNeuLink.Components.Functions.Function import Integrator +from PsyNeuLink.Components.Mechanisms.Mechanism import Mechanism_Base +from PsyNeuLink.Components.ShellClasses import Mechanism +from PsyNeuLink.Globals.Defaults import defaultControlAllocation +from PsyNeuLink.Globals.Keywords import FUNCTION, ALL, INIT__EXECUTE__METHOD_ONLY, INPUT_STATES, \ + CONTROL_PROJECTIONS, CONTROL_SIGNALS + +from PsyNeuLink.Globals.Preferences.ComponentPreferenceSet import is_pref_set +from PsyNeuLink.Globals.Preferences.PreferenceSet import PreferenceLevel +from PsyNeuLink.Scheduling.TimeScale import CentralClock, TimeScale + +MODULATED_MECHANISMS = 'modulated_mechanisms' +CONTROL_SIGNAL_NAME = 'LCMechanism_ControlSignal' + +ControlMechanismRegistry = {} + +class LCMechanismError(Exception): + def __init__(self, error_value): + self.error_value = error_value + + +class LCMechanism(ControlMechanism_Base): + """ + LCMechanism( \ + monitor_for_control=None, \ + mode=0.0, \ + modulated_mechanisms=None, \ + params=None, \ + name=None, \ + prefs=None) + + Subclass of `ControlMechanism ` that modulates the `multiplicative parameter + ` of the `function ` of one or more `Mechanisms `. + + Arguments + --------- + + mode : float : default 0.0 + specifies the default value for the mode parameter of the LCMechanism's `function `. + + COMMENT: + monitor_for_control : List[OutputState specification] : default None + specifies set of OutputStates to monitor (see :ref:`LCMechanism_Monitored_OutputStates` for + specification options). + COMMENT + + modulated_mechanisms : List[Mechanism] or *ALL* + specifies the Mechanisms to be modulated by the LCMechanism. + If it is a list, every item must be a Mechanism with a `function ` that implements a + `multiplicative parameter `; alternatively the keyword *ALL* can be used to + specify all of the `ProcessingMechanisms ` in the Composition(s) to which the LCMechanism + belongs. + + params : Optional[Dict[param keyword, param value]] + a `parameter dictionary ` that can be used to specify the parameters + for the Mechanism, parameters for its function, and/or a custom function and its parameters. Values + specified for parameters in the dictionary override any assigned to those parameters in arguments of the + constructor. + + name : str : default LCMechanism- + a string used for the name of the Mechanism. + If not is specified, a default is assigned by `MechanismRegistry` + (see :doc:`Registry ` for conventions used in naming, including for default and duplicate names). + + prefs : Optional[PreferenceSet or specification dict : Mechanism.classPreferences] + the `PreferenceSet` for the Mechanism. + If it is not specified, a default is assigned using `classPreferences` defined in __init__.py + (see :doc:`PreferenceSet ` for details). + + + Attributes + ---------- + + mode : float : default 0.0 + determines the value for the mode parameter of the LCMechanism's `FitzHughNagumoIntegrator` function. 
+ + COMMENT: + monitoring_mechanism : ObjectiveMechanism + Mechanism that monitors and evaluates the values specified in the LCMechanism's **monitor_for_control** + argument, and transmits the result to the LCMechanism's *ERROR_SIGNAL* + `input_state `. + + monitored_output_states : List[OutputState] + each item is an `OutputState` of a `Mechanism ` specified in the **monitor_for_control** argument + of the LCMechanism's constructor, the `value ` \\s of which serve as the items of the + LCMechanism's `variable `. + COMMENT + + function : `FitzHughNagumoIntegrator` + takes the LCMechanism's `input ` and generates its response ` under + the influence of its `mode ` parameter. + + COMMENT: + VERSIONS FOR SINGLE ControlSignal + control_signals : List[ControlSignal] + contains the LCMechanism's single `ControlSignal`, which sends `ControlProjections` to the + `multiplicative parameter ` of each of the Mechanisms the LCMechanism + controls (listed in its `modulated_mechanisms ` attribute). + + control_projections : List[ControlProjection] + list of `ControlProjections ` sent by the LCMechanism's `ControlSignal`, each of which + projects to the `ParameterState` for the `multiplicative parameter ` of the + `function ` of one of the Mechanisms listed in `modulated_mechanisms + ` attribute. + COMMENT + + control_signals : List[ControlSignal] + contains a ControlSignal for each Mechanism listed in the LCMechanism's `modulated_mechanisms + ` attribute; each ControlSignal sends a `ControlProjections` to the + `ParameterState` for the `multiplicative parameter ` of the `function + corresponding Mechanism. + + control_projections : List[ControlProjection] + list of all of the `ControlProjections ` sent by the `ControlSignals ` listed + in `control_signals `. + + modulated_mechanisms : List[Mechanism] + list of Mechanisms modulated by the LCMechanism. + + modulation : ModulationParam : default ModulationParam.MULTIPLICATIVE + the default form of modulation used by the LCMechanism's `ControlProjections`, + unless they are `individually specified `. 
+ + """ + + componentType = "LCMechanism" + + initMethod = INIT__EXECUTE__METHOD_ONLY + + classPreferenceLevel = PreferenceLevel.TYPE + # Any preferences specified below will override those specified in TypeDefaultPreferences + # Note: only need to specify setting; level will be assigned to TYPE automatically + # classPreferences = { + # kwPreferenceSetName: 'ControlMechanismClassPreferences', + # kp: ...} + + class ClassDefaults(AdaptiveMechanism_Base.ClassDefaults): + # This must be a list, as there may be more than one (e.g., one per control_signal) + variable = defaultControlAllocation + + from PsyNeuLink.Components.Functions.Function import Linear + paramClassDefaults = ControlMechanism_Base.paramClassDefaults.copy() + paramClassDefaults.update({FUNCTION:Integrator, + CONTROL_SIGNALS: None, + CONTROL_PROJECTIONS: None, + }) + + @tc.typecheck + def __init__(self, + default_variable=None, + size=None, + monitor_for_control:tc.optional(list)=None, + mode:tc.optional(float)=0.0, + modulated_mechanisms:tc.optional(tc.any(list,str)) = None, + modulation:tc.optional(_is_modulation_param)=ModulationParam.MULTIPLICATIVE, + params=None, + name=None, + prefs:is_pref_set=None, + context=None): + + # Assign args to params and functionParams dicts (kwConstants must == arg names) + params = self._assign_args_to_param_dicts(mode=mode, + modulated_mechanisms=modulated_mechanisms, + params=params) + + super().__init__(default_variable=default_variable, + size=size, + monitor_for_control=monitor_for_control, + modulation=modulation, + params=params, + name=name, + prefs=prefs, + context=self) + + def _validate_params(self, request_set, target_set=None, context=None): + """Validate SYSTEM, MONITOR_FOR_CONTROL and CONTROL_SIGNALS + + Check that all items in MONITOR_FOR_CONTROL are Mechanisms or OutputStates for Mechanisms in self.system + Check that every item in `modulated_mechanisms ` is a Mechanism + and that its function has a multiplicative_param + """ + + super()._validate_params(request_set=request_set, + target_set=target_set, + context=context) + + # if MONITOR_FOR_CONTROL in target_set: + # for spec in target_set[MONITOR_FOR_CONTROL]: + # if isinstance(spec, MonitoredOutputStatesOption): + # continue + # if isinstance(spec, tuple): + # spec = spec[0] + # if isinstance(spec, (OutputState, Mechanism_Base)): + # spec = spec.name + # if not isinstance(spec, str): + # raise LCMechanismError("Invalid specification in {} arg for {} ({})". + # format(MONITOR_FOR_CONTROL, self.name, spec)) + # # If controller has been assigned to a System, + # # check that all the items in monitor_for_control are in the same System + # # IMPLEMENTATION NOTE: If self.system is None, onus is on doing the validation + # # when the controller is assigned to a System [TBI] + # if self.system: + # if not any((spec is mech.name or spec in mech.output_states.names) + # for mech in self.system.mechanisms): + # raise LCMechanismError("Specification in {} arg for {} ({}) must be a " + # "Mechanism or an OutputState of one in {}". 
+ # format(MONITOR_FOR_CONTROL, self.name, spec, self.system.name)) + + if MODULATED_MECHANISMS in target_set and target_set[MODULATED_MECHANISMS]: + + from PsyNeuLink.Components.States.ModulatorySignals.ControlSignal import ControlSignal + + spec = target_set[MODULATED_MECHANISMS] + + if isinstance (spec, str): + if not spec == ALL: + raise LCMechanismError("A string other than the keyword \'ALL\' was specified for the {} argument " + "the constructor for {}".format(MODULATED_MECHANISMS, self.name)) + + if not isinstance(spec, list): + spec = [spec] + + for mech in spec: + if not isinstance(mech, Mechanism): + raise LCMechanismError("The specification of the {} argument for {} contained an item ({})" + "that is not a Mechanism.".format(MODULATED_MECHANISMS, self.name, mech)) + + if not hasattr(mech.function_object, MULTIPLICATIVE_PARAM): + raise LCMechanismError("The specification of the {} argument for {} contained a Mechanism ({})" + "that does not have a {}.". + format(MODULATED_MECHANISMS, self.name, mech, MULTIPLICATIVE_PARAM)) + + # def _instantiate_monitored_output_states(self, context=None): + # raise LCMechanismError("{0} (subclass of {1}) must implement _instantiate_monitored_output_states". + # format(self.__class__.__name__, + # self.__class__.__bases__[0].__name__)) + + def _instantiate_input_states(self, context=None): + """Instantiate input_value attribute + + Instantiate input_states and monitored_output_states attributes (in case they are referenced) + and assign any OutputStates that project to the input_states to monitored_output_states + + IMPLEMENTATION NOTE: At present, these are dummy assignments, simply to satisfy the requirements for + subclasses of ControlMechanism; in the future, an _instantiate_monitoring_mechanism() + method should be implemented that also implements an _instantiate_monitored_output_states + method, and that can be used to add OutputStates/Mechanisms to be monitored. 
+ """ + + self.monitored_output_states = [] + + if not hasattr(self, INPUT_STATES): + self._input_states = None + elif self.input_states: + for input_state in self.input_states: + for projection in input_state.path_afferents: + self.monitored_output_states.append(projection.sender) + + + + def _instantiate_output_states(self, context=None): + """Instantiate ControlSignal and assign ControlProjections to Mechanisms in self.modulated_mechanisms + + If **modulated_mechanisms** argument of constructor was specified as *ALL*, + assign all ProcessingMechanisms in Compositions to which LCMechanism belongs to self.modulated_mechanisms + Instantiate ControlSignal with Projections to the ParameterState for the multiplicative parameter of every + Mechanism listed in self.modulated_mechanisms + + Returns ControlSignal (OutputState) + """ + from PsyNeuLink.Components.States.ModulatorySignals.ControlSignal import ControlSignal + from PsyNeuLink.Components.States.ParameterState import _get_parameter_state + from PsyNeuLink.Components.Projections.ModulatoryProjections.ControlProjection import ControlProjection + from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ProcessingMechanism import ProcessingMechanism_Base + + # *ALL* is specified for modulated_mechanisms: + # assign all Processing Mechanisms in the LCMechanism's Composition(s) to its modulated_mechanisms attribute + if isinstance(self.modulated_mechanisms, str) and self.modulated_mechanisms is ALL: + self.modulated_mechanisms = [] + for system in self.systems: + for mech in system.mechanisms: + if isinstance(mech, ProcessingMechanism_Base) and hasattr(mech.function, MULTIPLICATIVE_PARAM): + self.modulated_mechanisms.append(mech) + for process in self.processes: + for mech in process.mechanisms: + if isinstance(mech, ProcessingMechanism_Base) and hasattr(mech.function, MULTIPLICATIVE_PARAM): + self.modulated_mechanisms.append(mech) + + # # MODIFIED 9/3/17 OLD [ASSIGN ALL ControlProjections TO A SINGLE ControlSignal] + # # Get the ParameterState for the multiplicative parameter of each Mechanism in self.modulated_mechanisms + # multiplicative_params = [] + # for mech in self.modulated_mechanisms: + # multiplicative_params.append(mech._parameter_states[mech.function_object.multiplicative_param]) + # + # # Create specification for **control_signals** argument of ControlSignal constructor + # self.control_signals = [{CONTROL_SIGNAL_NAME:multiplicative_params}] + + # MODIFIED 9/3/17 NEW [ASSIGN EACH ControlProjection TO A DIFFERENT ControlSignal] + # Get the name of the multiplicative parameter of each Mechanism in self.modulated_mechanisms + multiplicative_param_names = [] + for mech in self.modulated_mechanisms: + multiplicative_param_names.append(mech.function_object.multiplicative_param) + + # Create specification for **control_signals** argument of ControlSignal constructor + self.control_signals = [] + for mech, mult_param_name in zip(self.modulated_mechanisms, multiplicative_param_names): + self.control_signals.append((mult_param_name, mech)) + + # MODIFIED 9/3/17 END + + + + super()._instantiate_output_states(context=context) + + # def _instantiate_attributes_after_function(self, context=None): + # """Implment ControlSignals specified in control_signals arg or "locally" in parameter specification(s) + # + # Calls super's instantiate_attributes_after_function, which calls _instantiate_output_states; + # that insures that any ControlSignals specified in control_signals arg are instantiated first + # Then calls _assign_as_controller to 
instantiate any ControlProjections/ControlSignals specified + # along with parameter specification(s) (i.e., as part of a (, ControlProjection) tuple + # """ + # + # super()._instantiate_attributes_after_function(context=context) + # + def _execute(self, + variable=None, + runtime_params=None, + clock=CentralClock, + time_scale=TimeScale.TRIAL, + context=None): + """Updates LCMechanism's ControlSignal based on input and mode parameter value + """ + return self.function() + + @tc.typecheck + def add_modulated_mechanisms(self, mechanisms:list): + """Add ControlProjections to the specified Mechanisms. + """ + + request_set = {MODULATED_MECHANISMS:mechanisms} + target_set = {} + self._validate_params(request_set=request_set, target_set=target_set) + + # Assign ControlProjection from the LCMechanism's ControlSignal + # to the ParameterState for the multiplicative parameter of each Mechanism in mechanisms + multiplicative_params = [] + for mech in mechanisms: + self.modulated_mechanisms.append(mech) + parameter_state = mech._parameter_states[mech.multiplicative_param] + control_projection = ControlProjection(sender=self.control_signals[0], + receiver=parameter_state) + self.control_projections.append(control_projection) + + @tc.typecheck + def remove_modulated_mechanisms(self, mechanisms:list): + """Remove the ControlProjections to the specified Mechanisms. + """ + + for mech in mechanisms: + if not mech in self.modulated_mechanisms: + continue + + parameter_state = mech._parameter_states[mech.multiplicative_param] + + # Get ControlProjection + for projection in parameter_state.mod_afferents: + if projection.sender.owner is self: + control_projection = projection + break + + # Delete ControlProjection ControlSignal's list of efferents + index = self.control_signals[0].efferents[control_projection] + del(self.control_signals[0].efferents[index]) + + # Delete ControlProjection from recipient ParameterState + index = parameter_state.mod_afferents[control_projection] + del(parameter_state.mod_afferents[index]) + + # Delete ControlProjection from self.control_projections + index = self.control_projections[control_projection] + del(self.control_projections[index]) + + # Delete ControlProjection + del(control_projection) + + # Delete Mechanism from self.modulated_mechanisms + index = self.modulated_mechanisms.index(mech) + del(self.modulated_mechanisms[index]) + + def show(self): + """Display the `OutputStates ` monitored by the LCMechanism's `monitoring_mechanism` + and the `multiplicative parameters ` modulated by the LCMechanism. + """ + + print ("\n---------------------------------------------------------") + + print ("\n{0}".format(self.name)) + print("\n\tMonitoring the following Mechanism OutputStates:") + if self.monitoring_mechanism is None: + print ("\t\tNone") + else: + for state in self.monitoring_mechanism.input_states: + for projection in state.path_afferents: + monitored_state = projection.sender + monitored_state_mech = projection.sender.owner + monitored_state_index = self.monitored_output_states.index(monitored_state) + + weight = self.monitor_for_control_weights_and_exponents[monitored_state_index][0] + exponent = self.monitor_for_control_weights_and_exponents[monitored_state_index][1] + + print ("\t\t{0}: {1} (exp: {2}; wt: {3})". 
+ format(monitored_state_mech.name, monitored_state.name, weight, exponent)) + + print ("\n\tModulating the following parameters:".format(self.name)) + # Sort for consistency of output: + state_names_sorted = sorted(self.output_states.names) + for state_name in state_names_sorted: + for projection in self.output_states[state_name].efferents: + print ("\t\t{0}: {1}".format(projection.receiver.owner.name, projection.receiver.name)) + + print ("\n---------------------------------------------------------") diff --git a/Scripts/DEBUGGING SCRIPTS/LCMechanism Test Script.py b/Scripts/DEBUGGING SCRIPTS/LCMechanism Test Script.py new file mode 100644 index 00000000000..e918361530d --- /dev/null +++ b/Scripts/DEBUGGING SCRIPTS/LCMechanism Test Script.py @@ -0,0 +1,24 @@ +from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.ControlMechanism.ControlMechanism import ControlMechanism_Base +from PsyNeuLink.Library.Mechanisms.AdaptiveMechanisms.ControlMechanisms.LCMechanism import LCMechanism +from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.TransferMechanism import TransferMechanism +from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.GatingMechanism.GatingMechanism import GatingMechanism +from PsyNeuLink.Components.Functions.Function import ModulationParam +from PsyNeuLink.Components.Functions.Function import Linear, Logistic + +from PsyNeuLink.Components.Process import process +from PsyNeuLink.Components.System import system +from PsyNeuLink.Globals.Keywords import * + +my_mech_1 = TransferMechanism(function=Linear, name='my_linear_mechanism') +my_mech_2 = TransferMechanism(function=Logistic, name='my_logistic_mechanism') + +LC = LCMechanism(modulated_mechanisms=[my_mech_1, my_mech_2], name='my_LC') + +LC.show() + +my_gating_mechanism = GatingMechanism(gating_signals=[{'GATE_ALL': [my_mech_1, + my_mech_2]}], + modulation=ModulationParam.ADDITIVE) + + +my_gating_mechanism.show() \ No newline at end of file diff --git a/docs/source/ControlMechanisms.rst b/docs/source/ControlMechanisms.rst index c9da5ce742a..5fadc65442e 100644 --- a/docs/source/ControlMechanisms.rst +++ b/docs/source/ControlMechanisms.rst @@ -13,3 +13,4 @@ ControlMechanisms :maxdepth: 1 EVCMechanism + LCMechanism diff --git a/docs/source/EVCMechanism.rst b/docs/source/EVCMechanism.rst index 9eeeaebd390..22755819893 100644 --- a/docs/source/EVCMechanism.rst +++ b/docs/source/EVCMechanism.rst @@ -2,4 +2,5 @@ EVCMechanism ============ .. automodule:: Library.Mechanisms.AdaptiveMechanisms.ControlMechanisms.EVC.EVCMechanism - :exclude-members: random, LinearCombination \ No newline at end of file + :members: + :exclude-members: random, LinearCombination, Linear \ No newline at end of file diff --git a/docs/source/LCMechanism.rst b/docs/source/LCMechanism.rst new file mode 100644 index 00000000000..e036a94c3f5 --- /dev/null +++ b/docs/source/LCMechanism.rst @@ -0,0 +1,6 @@ +LCMechanism +=========== + +.. 
automodule:: Library.Mechanisms.AdaptiveMechanisms.ControlMechanisms.LCMechanism + :members: + :exclude-members: random, LinearCombination, Linear \ No newline at end of file From 29954a69cb9428cd92e9cfc89fca3401638b8ae1 Mon Sep 17 00:00:00 2001 From: jdcpni Date: Mon, 4 Sep 2017 21:42:12 -0400 Subject: [PATCH 22/69] =?UTF-8?q?=E2=80=A2=20ObjectiveMechanism=20(#445)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - rename ERROR_SIGNAL OutputState -> OUTCOME --- .../ControlMechanism/ControlMechanism.py | 4 +- .../LearningMechanism/LearningAuxilliary.py | 6 +-- .../LearningMechanism/LearningMechanism.py | 37 ++++++++++--------- .../ObjectiveMechanism.py | 22 +++++------ .../LearningProjection.py | 7 ++-- .../PathwayProjections/MappingProjection.py | 7 ++-- PsyNeuLink/Components/System.py | 2 +- .../ControlMechanisms/EVC/EVCMechanism.py | 4 +- .../ComparatorMechanism.py | 26 ++++++------- TODO List.py | 4 +- 10 files changed, 60 insertions(+), 59 deletions(-) diff --git a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py index ec9553cd237..04d137712fa 100644 --- a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py @@ -113,12 +113,12 @@ (as well as the ObjectiveMechanism's `monitored_values ` attribute). The `monitored_output_states ` are evaluated by the ObjectiveMechanism's `function `; the result is assigned as the `value - ` of the ObjectiveMechanism's *ERROR_SIGNAL* `OutputState ` + ` of the ObjectiveMechanism's *OUTCOME* `OutputState ` and (by way of a `MappingProjection` -- see below) to the ControlMechanism's *ERROR_SIGNAL* `InputState`. This information is used by the ControlMechanism to set the `allocation ` for each of the ControlMechanism's ControlSignals (see `ControlMechanism_Function`). .. - * a `MappingProjection` that projects from the ObjectiveMechanism's *ERROR_SIGNAL* `OutputState + * a `MappingProjection` that projects from the ObjectiveMechanism's *OUTCOME* `OutputState ` to the ControlMechanism's *ERROR_SIGNAL* `InputState`. .. 
* `MappingProjections ` from Mechanisms or OutputStates specified in diff --git a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningAuxilliary.py b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningAuxilliary.py index cbbdf3437ef..408fd735e44 100644 --- a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningAuxilliary.py +++ b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningAuxilliary.py @@ -135,10 +135,10 @@ from PsyNeuLink.Components.Component import function_type, method_type from PsyNeuLink.Components.Functions.Function import BackPropagation, Linear, Reinforcement -from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.LearningMechanism.LearningMechanism import ACTIVATION_INPUT, \ - ACTIVATION_OUTPUT, LearningMechanism +from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.LearningMechanism.LearningMechanism \ + import LearningMechanism, ACTIVATION_INPUT, ACTIVATION_OUTPUT, ERROR_SIGNAL from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism \ - import ERROR_SIGNAL, ObjectiveMechanism + import OUTCOME, ObjectiveMechanism from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ProcessingMechanism import ProcessingMechanism_Base from PsyNeuLink.Components.Projections.ModulatoryProjections.LearningProjection import LearningProjection from PsyNeuLink.Components.Projections.PathwayProjections.MappingProjection import MappingProjection diff --git a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningMechanism.py b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningMechanism.py index dcd77e7e4b8..42b6060fa37 100644 --- a/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningMechanism.py @@ -118,18 +118,19 @@ .. _LearningMechanism_Input_Error_Signal: -* *ERROR_SIGNAL* - receives the value of an error signal from either a `ComparatorMechanism` or another - `LearningMechanism `. If the `primary_learned_projection` projects to the - `TERMINAL` Mechanism of the Process or System being learned, or is not part of a `multilayer learning sequence - `, then the `error_signal` comes from a ComparatorMechanism. If the - `primary_learned_projection` is part of a `multilayer learning sequence `, - then the `error_signal` comes from the next LearningMechanism in the sequence (i.e., the one associated with the - `output_source`). It is used by the LearningMechanism's `function ` to calculate the - `learning_signal `; note that the value of the *ERROR_SIGNAL* InputState may - not be the same as that of the `error_signal ` attribute or *ERROR_SIGNAL* - `OutputState ` (see `note ` below). - The `value ` of the *ERROR_SIGNAL* InputState is assigned as the third item of the - LearningMechanism's `variable ` attribute. +* *ERROR_SIGNAL* - receives the `value from the *OUTCOME* OutputState of a `ComparatorMechanism` or + the *ERROR_SIGNAL* OutputState of another `LearningMechanism `. If the + `primary_learned_projection` projects to the `TERMINAL` Mechanism of the Process or System being learned, + or is not part of a `multilayer learning sequence `, + then the `error_signal` comes from a ComparatorMechanism. 
If the `primary_learned_projection` is part of a + `multilayer learning sequence `, then the `error_signal` comes from the next + LearningMechanism in the sequence (i.e., the one associated with the `output_source`). It is used by the + LearningMechanism's `function ` to calculate the `learning_signal + `; note that the value of the *ERROR_SIGNAL* InputState may not be the same as + that of the `error_signal ` attribute or *ERROR_SIGNAL* `OutputState + ` (see `note ` below). The `value + ` of the *ERROR_SIGNAL* InputState is assigned as the third item of the LearningMechanism's + `variable ` attribute. .. _LearningMechanism_Function: @@ -334,7 +335,7 @@ .. * from the Process or System to the ComparatorMechanism's *TARGET* `InputState `; .. -* from the ComparatorMechanism's *ERROR_SIGNAL* `output_state ` to the +* from the ComparatorMechanism's *OUTCOME* `output_state ` to the LearningMechanism's *ERROR_SIGNAL* `InputState `. In addition, a `LearningProjection` is created from the `LearningSignal` for the @@ -490,7 +491,7 @@ import BackPropagation, ModulationParam, _is_modulation_param, is_function_type from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.AdaptiveMechanism import AdaptiveMechanism_Base from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism \ - import ERROR_SIGNAL, ObjectiveMechanism + import OUTCOME, ObjectiveMechanism from PsyNeuLink.Components.Projections.PathwayProjections.MappingProjection import MappingProjection from PsyNeuLink.Components.Projections.Projection \ import Projection_Base, _is_projection_spec, _validate_receiver, projection_keywords @@ -528,10 +529,10 @@ def _is_learning_spec(spec): ERROR_OUTPUT_INDEX = 2 ERROR_SIGNAL_INDEX = 3 -# Used to name input_states: +# Used to name input_states and output_states: ACTIVATION_INPUT = 'activation_input' # InputState ACTIVATION_OUTPUT = 'activation_output' # InputState - +ERROR_SIGNAL = 'error_signal' input_state_names = [ACTIVATION_INPUT, ACTIVATION_OUTPUT, ERROR_SIGNAL] output_state_names = [ERROR_SIGNAL, LEARNING_SIGNAL] @@ -1352,7 +1353,7 @@ def _instantiate_error_signal_projection(sender, receiver): """ if isinstance(sender, ObjectiveMechanism): - sender = sender.output_states[ERROR_SIGNAL] + sender = sender.output_states[OUTCOME] elif isinstance(sender, LearningMechanism): sender = sender.output_states[ERROR_SIGNAL] else: @@ -1376,4 +1377,4 @@ def _instantiate_error_signal_projection(sender, receiver): return MappingProjection(sender=sender, receiver=receiver, matrix=IDENTITY_MATRIX, - name = sender.owner.name + ' ' + ERROR_SIGNAL) + name = sender.owner.name + ' ' + OUTCOME) diff --git a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanism.py b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanism.py index 3fd5856b2b3..ba6f50a483c 100644 --- a/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanism.py +++ b/PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanism.py @@ -64,7 +64,7 @@ `input_states ` attribute. The ObjectiveMechanism's `function ` uses these values to compute an `objective (or "loss") function `_, that is assigned as the value of its -*ERROR_SIGNAL* (`primary `) OutputState. By default, it uses a `LinearCombination` function to +*OUTCOME* (`primary `) OutputState. By default, it uses a `LinearCombination` function to sum the values of its InputStates. However, the `function ` can be customized to calculate other quantities (differences, ratios, etc. -- see `example ` below). 
It can also be replaced with any Python function @@ -260,7 +260,7 @@ ROLE = 'role' MONITORED_VALUES = 'monitored_values' MONITORED_VALUE_NAME_SUFFIX = '_Monitor' -ERROR_SIGNAL = 'error_signal' +OUTCOME = 'outcome' # This is a convenience class that provides list of standard_output_state names in IDE class OBJECTIVE_OUTPUT(): @@ -269,14 +269,14 @@ class OBJECTIVE_OUTPUT(): `Standard OutputStates ` for `ObjectiveMechanism`: - .. _OBJECTIVE_MECHANISM_ERROR_SIGNAL + .. _OBJECTIVE_MECHANISM_OUTCOME - *ERROR_SIGNAL* : 1d np.array + *OUTCOME* : 1d np.array the value of the objective or "loss" function computed based on the ObjectiveMechanism's `function ` """ - ERROR_SIGNAL=ERROR_SIGNAL + OUTCOME=OUTCOME class ObjectiveMechanismError(Exception): @@ -293,7 +293,7 @@ class ObjectiveMechanism(ProcessingMechanism_Base): monitored_values, \ input_states=None, \ function=LinearCombination, \ - output_states=[ERROR_SIGNAL], \ + output_states=[OUTCOME], \ params=None, \ name=None, \ prefs=None) @@ -350,7 +350,7 @@ class ObjectiveMechanism(ProcessingMechanism_Base): specifies the function used to evaluate the values listed in :keyword:`monitored_values` (see `function ` for details. - output_states : List[OutputState, value, str or dict] or Dict[] : default [ERROR_SIGNAL] + output_states : List[OutputState, value, str or dict] or Dict[] : default [OUTCOME] specifies the OutputStates for the Mechanism; role: Optional[LEARNING, CONTROL] @@ -411,14 +411,14 @@ class ObjectiveMechanism(ProcessingMechanism_Base): output_state : OutputState contains the `primary OutputState ` of the ObjectiveMechanism; the default is - its *ERROR_SIGNAL* OutputState (see ObjectiveMechanism_Structure), the value of which is equal to the + its *OUTCOME* OutputState (see ObjectiveMechanism_Structure), the value of which is equal to the `value ` attribute of the ObjectiveMechanism. output_states : ContentAddressableList[OutputState] - contains, by default, only the *ERROR_SIGNAL* (primary) OutputState of the ObjectiveMechanism. + contains, by default, only the *OUTCOME* (primary) OutputState of the ObjectiveMechanism. output_values : 2d np.array - contains one item that is the value of the *ERROR_SIGNAL* OutputState. + contains one item that is the value of the *OUTCOME* OutputState. name : str : default ObjectiveMechanism- the name of the Mechanism. 
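A minimal usage sketch of the renamed *OUTCOME* OutputState, assuming the constructor signature and the
``output_states[OUTCOME]`` indexing shown in this patch (the monitored TransferMechanisms and the input
values below are illustrative, not part of the patch)::

    from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism \
        import ObjectiveMechanism, OUTCOME
    from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.TransferMechanism import TransferMechanism

    A = TransferMechanism(name='A')
    B = TransferMechanism(name='B')

    # By default the ObjectiveMechanism combines the values it monitors with LinearCombination
    # (a sum), and assigns the result to its *OUTCOME* (primary) OutputState.
    obj_mech = ObjectiveMechanism(monitored_values=[A, B])
    obj_mech.execute([[1.0], [2.0]])
    print(obj_mech.output_states[OUTCOME].value)    # expected: the sum of the monitored values
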
@@ -463,7 +463,7 @@ def __init__(self, monitored_values:tc.any(list, dict), input_states=None, function=LinearCombination, - output_states:tc.optional(tc.any(list, dict))=[ERROR_SIGNAL], + output_states:tc.optional(tc.any(list, dict))=[OUTCOME], params=None, name=None, prefs:is_pref_set=None, diff --git a/PsyNeuLink/Components/Projections/ModulatoryProjections/LearningProjection.py b/PsyNeuLink/Components/Projections/ModulatoryProjections/LearningProjection.py index a509dfdb105..4ac36c0ead6 100644 --- a/PsyNeuLink/Components/Projections/ModulatoryProjections/LearningProjection.py +++ b/PsyNeuLink/Components/Projections/ModulatoryProjections/LearningProjection.py @@ -148,9 +148,8 @@ from PsyNeuLink.Components.Component import InitStatus, parameter_keywords from PsyNeuLink.Components.Functions.Function import BackPropagation, Linear, is_function_type from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.LearningMechanism.LearningMechanism \ - import LearningMechanism -from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism \ - import ERROR_SIGNAL, ObjectiveMechanism + import LearningMechanism, ERROR_SIGNAL +from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism import ObjectiveMechanism from PsyNeuLink.Components.Projections.ModulatoryProjections.ModulatoryProjection import ModulatoryProjection_Base from PsyNeuLink.Components.Projections.PathwayProjections.MappingProjection import MappingProjection from PsyNeuLink.Components.Projections.Projection import Projection_Base, _is_projection_spec, projection_keywords @@ -158,7 +157,7 @@ from PsyNeuLink.Components.States.OutputState import OutputState from PsyNeuLink.Components.States.ParameterState import ParameterState from PsyNeuLink.Globals.Keywords import ENABLED, FUNCTION, FUNCTION_PARAMS, INITIALIZING, INTERCEPT, LEARNING, \ - LEARNING_PROJECTION, MATRIX, PARAMETER_STATES, PROJECTION_SENDER, SLOPE + LEARNING_PROJECTION, MATRIX, PARAMETER_STATES, PROJECTION_SENDER, SLOPE from PsyNeuLink.Globals.Preferences.ComponentPreferenceSet import is_pref_set from PsyNeuLink.Globals.Preferences.PreferenceSet import PreferenceLevel from PsyNeuLink.Globals.Utilities import iscompatible, parameter_spec diff --git a/PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py b/PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py index 1358ce4adb3..0bf8ead3137 100644 --- a/PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py +++ b/PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py @@ -47,9 +47,10 @@ (see `LearningMechanism_Learning_Configurations` for details); .. * by a `ControlMechanism `, from the `ObjectiveMechanism` that `it creates - ` to its *ERROR_SIGNAL* ``, and from the `OutputStates ` - listed in the ControlMechanism's `monitored_output_states ` - attribute) to the ObjectiveMechanism (as described above; an `IDENTITY_MATRIX` is used for all of these). + ` to its *ERROR_SIGNAL* `InputState`, and from the `OutputStates + ` listed in the ControlMechanism's `monitored_output_states + ` attribute) to the ObjectiveMechanism (as described above; an + `IDENTITY_MATRIX` is used for all of these). .. 
_Mapping_Matrix_Specification: diff --git a/PsyNeuLink/Components/System.py b/PsyNeuLink/Components/System.py index ff1f420d74c..8b36cd432f6 100644 --- a/PsyNeuLink/Components/System.py +++ b/PsyNeuLink/Components/System.py @@ -385,7 +385,7 @@ def __str__(self): # FIX: ONCE IT IS IN THE GRAPH, IT IS NOT LONGER EASY TO DETERMINE WHICH IS WHICH IS WHICH (SINCE SETS ARE NOT ORDERED) from PsyNeuLink.Components import SystemDefaultControlMechanism -from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism import ObjectiveMechanism +from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism import ObjectiveMechanism, OUTCOME from PsyNeuLink.Components.Process import process # System factory method: diff --git a/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py b/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py index ffe1427af6f..1d026ebc72f 100644 --- a/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py +++ b/PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py @@ -63,7 +63,7 @@ ` attribute, and the function specified in the **outcome_function** argument of the EVCMechanism's constructor to specify the ObjectiveMechanism's `function `; .. -* a `MappingProjection` that projects from the ObjectiveMechanism's *ERROR_SIGNAL* `OutputState +* a `MappingProjection` that projects from the ObjectiveMechanism's *OUTCOME* `OutputState ` to the EVCMechanism's `primary InputState `. .. * a `prediction Mechanism ` for each `ORIGIN` Mechanism in its `system @@ -91,7 +91,7 @@ --------- An EVCMechanism belongs to a `System` (identified in its `system ` attribute), and has a -specialized set of Components that support its operation. It receives its input from the *ERROR_SIGNAL* `OutputState +specialized set of Components that support its operation. It receives its input from the *OUTCOME* `OutputState ` of an `ObjectiveMechanism` (identified in its `monitoring_mechanism ` attribute), and has a specialized set of `functions ` and `mechanisms ` that it can use to simulate and evaluate the performance of its diff --git a/PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py b/PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py index 5286f6e80a6..1c25934827a 100644 --- a/PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py +++ b/PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py @@ -13,8 +13,8 @@ -------- A ComparatorMechanism is a subclass of `ObjectiveMechanism` that receives two inputs (a sample and a target), compares -them using its `function `, and places the calculted discrepancy between the two in its -*ERROR_SIGNAL* `output_state `. +them using its `function `, and places the calculated discrepancy between the two in its +*OUTCOME* `output_state `. .. _ComparatorMechanism_Creation: @@ -64,7 +64,7 @@ ` of *-1* and the target a `weight ` of *1*, so that the sample is subtracted from the target. However, the `function ` can be customized, so long as it is replaced with one that takes two arrays with the same format as its inputs, and generates a similar array as its result. -The result is assigned as the value of the Comparator Mechanism's *ERROR_SIGNAL* (`primary `) +The result is assigned as the value of the Comparator Mechanism's *OUTCOME* (`primary `) OutputState. 
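A short worked example of the default weighting just described, in plain NumPy (independent of the
PsyNeuLink classes; the sample and target values are made up)::

    import numpy as np

    sample = np.array([1.0, 2.0, 3.0])
    target = np.array([1.5, 2.0, 2.5])

    # Default ComparatorMechanism function: LinearCombination(weights=[[-1], [1]]),
    # i.e. a weighted sum that reduces to target - sample, element-wise.
    weights = np.array([[-1.0], [1.0]])
    outcome = np.sum(weights * np.array([sample, target]), axis=0)    # [ 0.5  0.  -0.5]

    # The *MSE* standard OutputState is the mean of the squared outcome values.
    mse = np.mean(outcome ** 2)                                       # 0.1666...
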
.. _ComparatorMechanism_Function: @@ -74,7 +74,7 @@ When an ComparatorMechanism is executed, it updates its input_states with the values of the OutputStates specified in its **sample** and **target** arguments, and then uses its `function ` to -compare these. By default, the result is assigned as to the `value ` of its *ERROR_SIGNAL* +compare these. By default, the result is assigned as to the `value ` of its *OUTCOME* `output_state `, and as the first item of the Mechanism's `output_values ` attribute. @@ -130,7 +130,7 @@ from PsyNeuLink.Components.Functions.Function import LinearCombination from PsyNeuLink.Components.Mechanisms.Mechanism import Mechanism_Base from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism \ - import ERROR_SIGNAL, MONITORED_VALUES, ObjectiveMechanism + import OUTCOME, MONITORED_VALUES, ObjectiveMechanism from PsyNeuLink.Components.ShellClasses import Mechanism from PsyNeuLink.Components.States.InputState import InputState from PsyNeuLink.Components.States.OutputState import OutputState, PRIMARY_OUTPUT_STATE, StandardOutputStates @@ -182,7 +182,7 @@ class ComparatorMechanism(ObjectiveMechanism): target, \ input_states=[SAMPLE,TARGET] \ function=LinearCombination(weights=[[-1],[1]], \ - input_states=[ERROR_SIGNAL] \ + output_states=[OUTCOME] \ params=None, \ name=None, \ prefs=None) @@ -232,7 +232,7 @@ class ComparatorMechanism(ObjectiveMechanism): function : Function, function or method : default Distance(metric=DIFFERENCE) specifies the `function ` used to compare the `sample` with the `target`. - output_states : List[OutputState, value, str or dict] or Dict[] : default [ERROR_SIGNAL] + output_states : List[OutputState, value, str or dict] or Dict[] : default [OUTCOME] specifies the OutputStates for the Mechanism; params : Optional[Dict[param keyword, param value]] @@ -287,14 +287,14 @@ class ComparatorMechanism(ObjectiveMechanism): output_state : OutputState contains the `primary ` OutputState of the ComparatorMechanism; the default is - its *ERROR_SIGNAL* OutputState, the value of which is equal to the `value ` + its *OUTCOME* OutputState, the value of which is equal to the `value ` attribute of the ComparatorMechanism. output_states : ContentAddressableList[OutputState] - contains, by default, only the *ERROR_SIGNAL* (primary) OutputState of the ComparatorMechanism. + contains, by default, only the *OUTCOME* (primary) OutputState of the ComparatorMechanism. output_values : 2d np.array - contains one item that is the value of the *ERROR_SIGNAL* OutputState. + contains one item that is the value of the *OUTCOME* OutputState. name : str : default ComparatorMechanism- the name of the Mechanism. 
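A minimal construction sketch, assuming the constructor signature shown in this patch; the sample/target
values, the call to ``execute``, and its expected result are illustrative only::

    from PsyNeuLink.Library.Mechanisms.ProcessingMechanisms.ObjectiveMechanisms.ComparatorMechanism \
        import ComparatorMechanism
    from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism import OUTCOME

    # sample and target may be specified as numeric values (see the constructor arguments above);
    # by default the discrepancy (target - sample) is assigned to the *OUTCOME* OutputState,
    # and the mean squared error to the *MSE* OutputState.
    comparator = ComparatorMechanism(sample=[0.0, 0.0, 0.0], target=[0.0, 0.0, 0.0])
    comparator.execute([[1.0, 2.0, 3.0], [1.5, 2.0, 2.5]])
    print(comparator.output_states[OUTCOME].value)    # expected: [0.5, 0., -0.5]
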
@@ -340,7 +340,7 @@ def __init__(self, target:tc.optional(tc.any(OutputState, Mechanism_Base, dict, is_numeric, str))=None, input_states=[SAMPLE, TARGET], function=LinearCombination(weights=[[-1], [1]]), - output_states:tc.optional(tc.any(list, dict))=[ERROR_SIGNAL, MSE], + output_states:tc.optional(tc.any(list, dict))=[OUTCOME, MSE], params=None, name=None, prefs:is_pref_set=None, @@ -361,8 +361,8 @@ def __init__(self, value=None) # IMPLEMENTATION NOTE: The following prevents the default from being updated by subsequent assignment - # (in this case, to [ERROR_SIGNAL, {NAME= MSE}]), but fails to expose default in IDE - # output_states = output_states or [ERROR_SIGNAL, MSE] + # (in this case, to [OUTCOME, {NAME= MSE}]), but fails to expose default in IDE + # output_states = output_states or [OUTCOME, MSE] # Create a StandardOutputStates object from the list of stand_output_states specified for the class if not isinstance(self.standard_output_states, StandardOutputStates): diff --git a/TODO List.py b/TODO List.py index 5c8c8242548..76ce7f0c169 100644 --- a/TODO List.py +++ b/TODO List.py @@ -404,8 +404,8 @@ # Note: multiple GatingProjections can be assigned to the same GatingSignal to achieve "divergent gating" # (that is, gating of many states with a single value -- e.g., LC) # DOCUMENTATION: revise LearningMechanism docstring to include output_state attribute, and describe situation with -# (multiple possible) LearningSignal entries, their relatioship to learing_signal attribute, and -# the ERROR_SIGNAL OutputState. +# (multiple possible) LearningSignal entries, their relatioship to learning_signal attribute, and +# the OUTCOME OutputState. # DOCUMENTATION: add output_states to attribute in docstring for ControlMechanism and GatingMechanism # DOCUMENTATION: add section on LearningSignals to LearningMechanism docstring: # note that default (and most common case) is for a single LearningSignal From 7fa92d1fdc309109f390c1bee6b799299ac764da Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 5 Sep 2017 11:32:08 -0400 Subject: [PATCH 23/69] adding _euler and _runge_kutta_4 as helper functions on the integrator class so that any integrator function can implement this solvers --- PsyNeuLink/Components/Functions/Function.py | 29 +++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index da63c904557..3dec96cf944 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -3365,6 +3365,35 @@ def _try_execute_param(self, param, var): param = param() return param + def _euler(self, previous_value, previous_time, slope, time_step_size): + + if callable(self.slope): + slope = self.slope(previous_time, previous_value) + else: + slope = self.slope + + return previous_value + slope*time_step_size + + def _runge_kutta_4(self, previous_value, previous_time, slope, time_step_size): + + if callable(self.slope): + slope_approx_1 = self.slope(previous_time, + previous_value) + slope_approx_2 = self.slope(previous_time + time_step_size/2, + previous_value + (0.5 * time_step_size * slope_approx_1)) + slope_approx_3 = self.slope(previous_time + time_step_size/2, + previous_value + (0.5 * time_step_size * slope_approx_2)) + slope_approx_4 = self.slope(previous_time + time_step_size, + previous_value + (time_step_size * slope_approx_3)) + value = previous_value \ + + (time_step_size/6)*(slope_approx_1 + 2*(slope_approx_2 + slope_approx_3) + slope_approx_4) + 
else: + slope = self.slope + value = previous_value + time_step_size*slope + + return value + + def function(self, *args, **kwargs): raise FunctionError("Integrator is not meant to be called explicitly") From 2ed63c10b824e46b44fec022122f0167a41afab7 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 5 Sep 2017 11:52:36 -0400 Subject: [PATCH 24/69] adding implementation of FHN as a function which calls the rk4 helper function --- PsyNeuLink/Components/Functions/Function.py | 546 ++++++++++++++------ 1 file changed, 390 insertions(+), 156 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 3dec96cf944..571f4ce16ce 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4570,10 +4570,10 @@ def function(self, return adjusted_value -class AccumulatorIntegrator( +class FHNIntegrator( Integrator): # -------------------------------------------------------------------------------- """ - AccumulatorIntegrator( \ + FHNIntegrator( \ default_variable=None, \ rate=1.0, \ noise=0.0, \ @@ -4585,11 +4585,11 @@ class AccumulatorIntegrator( prefs=None, \ ) - .. _AccumulatorIntegrator: + .. _FHNIntegrator: - Integrates prior value by multiplying `previous_value ` by `rate - ` and adding `increment ` and `noise - `. Ignores `variable `). + Integrates prior value by multiplying `previous_value ` by `rate + ` and adding `increment ` and `noise + `. Ignores `variable `). Arguments --------- @@ -4599,25 +4599,25 @@ class AccumulatorIntegrator( integrated. rate : float, list or 1d np.array : default 1.0 - specifies the multiplicative decrement of `previous_value ` (i.e., + specifies the multiplicative decrement of `previous_value ` (i.e., the rate of exponential decay). If it is a list or array, it must be the same length as - `variable `. + `variable `. increment : float, list or 1d np.array : default 0.0 - specifies an amount to be added to `prevous_value ` in each call to - `function ` (see `increment ` for details). - If it is a list or array, it must be the same length as `variable ` - (see `increment ` for details). + specifies an amount to be added to `prevous_value ` in each call to + `function ` (see `increment ` for details). + If it is a list or array, it must be the same length as `variable ` + (see `increment ` for details). noise : float, PsyNeuLink Function, list or 1d np.array : default 0.0 - specifies random value to be added to `prevous_value ` in each call to - `function `. If it is a list or array, it must be the same length as - `variable ` (see `noise ` for details). + specifies random value to be added to `prevous_value ` in each call to + `function `. If it is a list or array, it must be the same length as + `variable ` (see `noise ` for details). initializer float, list or 1d np.array : default 0.0 specifies starting value for integration. If it is a list or array, it must be the same length as - `default_variable ` (see `initializer - ` for details). + `default_variable ` (see `initializer + ` for details). params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that specifies the parameters for the @@ -4636,31 +4636,31 @@ class AccumulatorIntegrator( ---------- variable : number or np.array - **Ignored** by the AccumulatorIntegrator function. Refer to SimpleIntegrator or AdaptiveIntegrator for + **Ignored** by the FHNIntegrator function. 
Refer to SimpleIntegrator or AdaptiveIntegrator for integrator functions that depend on both a prior value and a new value (variable). rate : float or 1d np.array - determines the multiplicative decrement of `previous_value ` (i.e., the - rate of exponential decay) in each call to `function `. If it is a list or - array, it must be the same length as `variable ` and each element is - used to multiply the corresponding element of `previous_value ` (i.e., + determines the multiplicative decrement of `previous_value ` (i.e., the + rate of exponential decay) in each call to `function `. If it is a list or + array, it must be the same length as `variable ` and each element is + used to multiply the corresponding element of `previous_value ` (i.e., it is used for Hadamard multiplication). If it is a scalar or has a single element, its value is used to - multiply all the elements of `previous_value `. + multiply all the elements of `previous_value `. increment : float, function, list, or 1d np.array - determines the amount added to `previous_value ` in each call to - `function `. If it is a list or array, it must be the same length as - `variable ` and each element is added to the corresponding element of - `previous_value ` (i.e., it is used for Hadamard addition). If it is a + determines the amount added to `previous_value ` in each call to + `function `. If it is a list or array, it must be the same length as + `variable ` and each element is added to the corresponding element of + `previous_value ` (i.e., it is used for Hadamard addition). If it is a scalar or has a single element, its value is added to all the elements of `previous_value - `. + `. noise : float, function, list, or 1d np.array - determines a random value to be added in each call to `function `. - If it is a list or array, it must be the same length as `variable ` and - each element is added to the corresponding element of `previous_value ` + determines a random value to be added in each call to `function `. + If it is a list or array, it must be the same length as `variable ` and + each element is added to the corresponding element of `previous_value ` (i.e., it is used for Hadamard addition). If it is a scalar or has a single element, its value is added to all - the elements of `previous_value `. If it is a function, it will be + the elements of `previous_value `. If it is a function, it will be executed separately and added to each element. .. note:: @@ -4672,19 +4672,19 @@ class AccumulatorIntegrator( initializer : float, 1d np.array or list determines the starting value for integration (i.e., the value to which `previous_value - ` is set. If initializer is a list or array, it must be the same length - as `variable `. + ` is set. If initializer is a list or array, it must be the same length + as `variable `. TBI: Initializer may be a function or list/array of functions. - If initializer is specified as a single float or function, while `variable ` is + If initializer is specified as a single float or function, while `variable ` is a list or array, initializer will be applied to each variable element. In the case of an initializer function, this means that the function will be executed separately for each variable element. previous_value : 1d np.array : default ClassDefaults.variable - stores previous value to which `rate ` and `noise ` + stores previous value to which `rate ` and `noise ` will be added. 
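For orientation, a standalone sketch of the fixed-step solvers that the ``_euler`` and ``_runge_kutta_4``
helpers introduced in PATCH 23 compute, written without the class machinery and following the corrected
form (using the ``slope`` argument) that appears later in this series; the test equation dy/dt = -y and
the step size are made up for illustration::

    # Classical fixed-step solvers for dy/dt = f(t, y).
    def euler_step(f, t, y, h):
        return y + h * f(t, y)

    def rk4_step(f, t, y, h):
        k1 = f(t, y)
        k2 = f(t + h / 2, y + (h / 2) * k1)
        k3 = f(t + h / 2, y + (h / 2) * k2)
        k4 = f(t + h, y + h * k3)
        return y + (h / 6) * (k1 + 2 * (k2 + k3) + k4)

    # dy/dt = -y, y(0) = 1; exact solution y(t) = exp(-t).
    f = lambda t, y: -y
    y_euler = y_rk4 = 1.0
    h = 0.1
    for step in range(10):
        y_euler = euler_step(f, step * h, y_euler, h)
        y_rk4 = rk4_step(f, step * h, y_rk4, h)
    # After t = 1.0: y_euler ~ 0.3487, y_rk4 ~ 0.36788 (exact exp(-1) ~ 0.36788)
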
owner : Mechanism @@ -4717,21 +4717,26 @@ class ClassDefaults(Integrator.ClassDefaults): @tc.typecheck def __init__(self, default_variable=None, - # rate: parameter_spec = 1.0, - rate=None, - noise=0.0, - increment = None, - initializer=ClassDefaults.variable, + initial_w = 0.0, + initial_v = 0.0, + time_step_size = 1.0, + t_0 = 0.0, + a = 0.08, + b = 0.7, + c =0.8, params: tc.optional(dict) = None, owner=None, prefs: is_pref_set = None, - context="AccumulatorIntegrator Init"): + context="FHNIntegrator Init"): # Assign args to params and functionParams dicts (kwConstants must == arg names) - params = self._assign_args_to_param_dicts(rate=rate, - initializer=initializer, - noise=noise, - increment = increment, + params = self._assign_args_to_param_dicts(initial_v = initial_v, + initial_w = initial_w, + time_step_size = time_step_size, + t_0 = t_0, + a = a, + b = b, + c = c, params=params) super().__init__( @@ -4741,96 +4746,13 @@ def __init__(self, prefs=prefs, context=context) - self.previous_value = self.initializer + self.previous_v = self.initial_v + self.previous_w = self.initial_w + self.previous_t = self.t_0 self.instance_defaults.variable = self.initializer self.auto_dependent = True - def _accumulator_check_args(self, variable=None, params=None, target_set=None, context=None): - """validate params and assign any runtime params. - - Called by AccumulatorIntegrator to validate params - Validation can be suppressed by turning parameter_validation attribute off - target_set is a params dictionary to which params should be assigned; - otherwise, they are assigned to paramsCurrent; - - Does the following: - - assign runtime params to paramsCurrent - - validate params if PARAM_VALIDATION is set - - :param params: (dict) - params to validate - :target_set: (dict) - set to which params should be assigned (default: self.paramsCurrent) - :return: - """ - - # PARAMS ------------------------------------------------------------ - - # If target_set is not specified, use paramsCurrent - if target_set is None: - target_set = self.paramsCurrent - - # # MODIFIED 11/27/16 OLD: - # # If parameter_validation is set, the function was called with params, - # # and they have changed, then validate requested values and assign to target_set - # if self.prefs.paramValidationPref and params and not params is None and not params is target_set: - # # self._validate_params(params, target_set, context=FUNCTION_CHECK_ARGS) - # self._validate_params(request_set=params, target_set=target_set, context=context) - - # If params have been passed, treat as runtime params and assign to paramsCurrent - # (relabel params as runtime_params for clarity) - runtime_params = params - if runtime_params and runtime_params is not None: - for param_name in self.user_params: - # Ignore input_states and output_states -- they should not be modified during run - # IMPLEMENTATION NOTE: - # FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED: - # At present, assignment of ``function`` as runtime param is not supported - # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: - continue - # If param is specified in runtime_params, then assign it - if param_name in runtime_params: - self.paramsCurrent[param_name] = runtime_params[param_name] - # Otherwise, (re-)assign to paramInstanceDefaults - # this insures that any params that were assigned as runtime on last 
execution are reset here - # (unless they have been assigned another runtime value) - elif not self.runtimeParamStickyAssignmentPref: - if param_name is FUNCTION_PARAMS: - for function_param in self.function_object.user_params: - self.function_object.paramsCurrent[function_param] = \ - self.function_object.paramInstanceDefaults[function_param] - continue - self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - self.runtime_params_in_use = True - - # Otherwise, reset paramsCurrent to paramInstanceDefaults - elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: - # Can't do the following since function could still be a class ref rather than abound method (see below) - # self.paramsCurrent = self.paramInstanceDefaults - for param_name in self.user_params: - # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED - # At present, assignment of ``function`` as runtime param is not supported - # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - if param_name is FUNCTION: - continue - if param_name is FUNCTION_PARAMS: - for function_param in self.function_object.user_params: - self.function_object.paramsCurrent[function_param] = \ - self.function_object.paramInstanceDefaults[function_param] - continue - self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - - self.runtime_params_in_use = False - - # If parameter_validation is set and they have changed, then validate requested values and assign to target_set - if self.prefs.paramValidationPref and params and not params is target_set: - try: - self._validate_params(variable=variable, request_set=params, target_set=target_set, context=context) - except TypeError: - self._validate_params(request_set=params, target_set=target_set, context=context) - def function(self, variable=None, params=None, @@ -4857,39 +4779,351 @@ def function(self, updated value of integral : 2d np.array """ - self._accumulator_check_args(variable, params=params, context=context) - # rate = np.array(self.rate).astype(float) - # increment = self.increment + variable = self.variable - if self.rate is None: - rate = 1.0 - else: - rate = self.rate + def dv_dt(time, v): + return v - (v**3)/3 - self.previous_w + variable - if self.increment is None: - increment = 0.0 - else: - increment = self.increment + def dw_dt(time, w): + return self.a*(self.previous_v + self.b - self.c*w) - # execute noise if it is a function - noise = self._try_execute_param(self.noise, variable) + new_v = self._runge_kutta_4(previous_time=self.previous_t, + previous_value=self.previous_v, + slope=dv_dt, + time_step_size=self.time_step_size) + new_w = self._runge_kutta_4(previous_time=self.previous_t, + previous_value=self.previous_w, + slope=dw_dt, + time_step_size=self.time_step_size) - # try: - # previous_value = params[INITIALIZER] - # except (TypeError, KeyError): + if not context or INITIALIZING not in context: + self.previous_v = new_v + self.previous_w = new_w + self.previous_t += self.time_step_size - previous_value = np.atleast_2d(self.previous_value) + return new_v, new_w - value = previous_value*rate + noise + increment + class AccumulatorIntegrator( + Integrator): # -------------------------------------------------------------------------------- + """ + AccumulatorIntegrator( \ + default_variable=None, \ + rate=1.0, \ + noise=0.0, \ + scale: parameter_spec = 1.0, \ + offset: parameter_spec = 0.0, 
\ + initializer, \ + params=None, \ + owner=None, \ + prefs=None, \ + ) + + .. _AccumulatorIntegrator: + + Integrates prior value by multiplying `previous_value ` by `rate + ` and adding `increment ` and `noise + `. Ignores `variable `). - # If this NOT an initialization run, update the old value - # If it IS an initialization run, leave as is - # (don't want to count it as an execution step) - if not context or not INITIALIZING in context: - self.previous_value = value - return value + Arguments + --------- + + default_variable : number, list or np.array : default ClassDefaults.variable + specifies a template for the value to be integrated; if it is a list or array, each element is independently + integrated. + + rate : float, list or 1d np.array : default 1.0 + specifies the multiplicative decrement of `previous_value ` (i.e., + the rate of exponential decay). If it is a list or array, it must be the same length as + `variable `. + + increment : float, list or 1d np.array : default 0.0 + specifies an amount to be added to `prevous_value ` in each call to + `function ` (see `increment ` for details). + If it is a list or array, it must be the same length as `variable ` + (see `increment ` for details). + + noise : float, PsyNeuLink Function, list or 1d np.array : default 0.0 + specifies random value to be added to `prevous_value ` in each call to + `function `. If it is a list or array, it must be the same length as + `variable ` (see `noise ` for details). + + initializer float, list or 1d np.array : default 0.0 + specifies starting value for integration. If it is a list or array, it must be the same length as + `default_variable ` (see `initializer + ` for details). + + params : Optional[Dict[param keyword, param value]] + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters in + arguments of the constructor. + + owner : Component + `component ` to which to assign the Function. + + prefs : Optional[PreferenceSet or specification dict : Function.classPreferences] + the `PreferenceSet` for the Function. If it is not specified, a default is assigned using `classPreferences` + defined in __init__.py (see :doc:`PreferenceSet ` for details). + + + Attributes + ---------- + + variable : number or np.array + **Ignored** by the AccumulatorIntegrator function. Refer to SimpleIntegrator or AdaptiveIntegrator for + integrator functions that depend on both a prior value and a new value (variable). + + rate : float or 1d np.array + determines the multiplicative decrement of `previous_value ` (i.e., the + rate of exponential decay) in each call to `function `. If it is a list or + array, it must be the same length as `variable ` and each element is + used to multiply the corresponding element of `previous_value ` (i.e., + it is used for Hadamard multiplication). If it is a scalar or has a single element, its value is used to + multiply all the elements of `previous_value `. + + increment : float, function, list, or 1d np.array + determines the amount added to `previous_value ` in each call to + `function `. If it is a list or array, it must be the same length as + `variable ` and each element is added to the corresponding element of + `previous_value ` (i.e., it is used for Hadamard addition). If it is a + scalar or has a single element, its value is added to all the elements of `previous_value + `. 
+ + noise : float, function, list, or 1d np.array + determines a random value to be added in each call to `function `. + If it is a list or array, it must be the same length as `variable ` and + each element is added to the corresponding element of `previous_value ` + (i.e., it is used for Hadamard addition). If it is a scalar or has a single element, its value is added to all + the elements of `previous_value `. If it is a function, it will be + executed separately and added to each element. + + .. note:: + + In order to generate random noise, a probability distribution function should be selected (see + `Distribution Functions ` for details), which will generate a new noise value from + its distribution on each execution. If noise is specified as a float or as a function with a fixed output, + then the noise will simply be an offset that remains the same across all executions. + + initializer : float, 1d np.array or list + determines the starting value for integration (i.e., the value to which `previous_value + ` is set. If initializer is a list or array, it must be the same length + as `variable `. + + TBI: + + Initializer may be a function or list/array of functions. + + If initializer is specified as a single float or function, while `variable ` is + a list or array, initializer will be applied to each variable element. In the case of an initializer function, + this means that the function will be executed separately for each variable element. + + previous_value : 1d np.array : default ClassDefaults.variable + stores previous value to which `rate ` and `noise ` + will be added. + + owner : Mechanism + `component ` to which the Function has been assigned. + + prefs : PreferenceSet or specification dict : Projection.classPreferences + the `PreferenceSet` for function. Specified in the **prefs** argument of the constructor for the function; + if it is not specified, a default is assigned using `classPreferences` defined in __init__.py + (see :doc:`PreferenceSet ` for details). + + """ + + componentName = ACCUMULATOR_INTEGRATOR_FUNCTION + + class ClassDefaults(Integrator.ClassDefaults): + variable = [[0]] + + paramClassDefaults = Function_Base.paramClassDefaults.copy() + # paramClassDefaults.update({INITIALIZER: ClassDefaults.variable}) + paramClassDefaults.update({ + NOISE: None, + RATE: None, + INCREMENT: None, + }) + + # multiplicative param does not make sense in this case + multiplicative_param = RATE + additive_param = INCREMENT + + @tc.typecheck + def __init__(self, + default_variable=None, + # rate: parameter_spec = 1.0, + rate=None, + noise=0.0, + increment=None, + initializer=ClassDefaults.variable, + params: tc.optional(dict) = None, + owner=None, + prefs: is_pref_set = None, + context="AccumulatorIntegrator Init"): + + # Assign args to params and functionParams dicts (kwConstants must == arg names) + params = self._assign_args_to_param_dicts(rate=rate, + initializer=initializer, + noise=noise, + increment=increment, + params=params) + + super().__init__( + # default_variable=default_variable, + params=params, + owner=owner, + prefs=prefs, + context=context) + + self.previous_value = self.initializer + self.instance_defaults.variable = self.initializer + + self.auto_dependent = True + + def _accumulator_check_args(self, variable=None, params=None, target_set=None, context=None): + """validate params and assign any runtime params. 
+ + Called by AccumulatorIntegrator to validate params + Validation can be suppressed by turning parameter_validation attribute off + target_set is a params dictionary to which params should be assigned; + otherwise, they are assigned to paramsCurrent; + + Does the following: + - assign runtime params to paramsCurrent + - validate params if PARAM_VALIDATION is set + + :param params: (dict) - params to validate + :target_set: (dict) - set to which params should be assigned (default: self.paramsCurrent) + :return: + """ + + # PARAMS ------------------------------------------------------------ + + # If target_set is not specified, use paramsCurrent + if target_set is None: + target_set = self.paramsCurrent + + # # MODIFIED 11/27/16 OLD: + # # If parameter_validation is set, the function was called with params, + # # and they have changed, then validate requested values and assign to target_set + # if self.prefs.paramValidationPref and params and not params is None and not params is target_set: + # # self._validate_params(params, target_set, context=FUNCTION_CHECK_ARGS) + # self._validate_params(request_set=params, target_set=target_set, context=context) + + # If params have been passed, treat as runtime params and assign to paramsCurrent + # (relabel params as runtime_params for clarity) + runtime_params = params + if runtime_params and runtime_params is not None: + for param_name in self.user_params: + # Ignore input_states and output_states -- they should not be modified during run + # IMPLEMENTATION NOTE: + # FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED: + # At present, assignment of ``function`` as runtime param is not supported + # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; + # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) + if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: + continue + # If param is specified in runtime_params, then assign it + if param_name in runtime_params: + self.paramsCurrent[param_name] = runtime_params[param_name] + # Otherwise, (re-)assign to paramInstanceDefaults + # this insures that any params that were assigned as runtime on last execution are reset here + # (unless they have been assigned another runtime value) + elif not self.runtimeParamStickyAssignmentPref: + if param_name is FUNCTION_PARAMS: + for function_param in self.function_object.user_params: + self.function_object.paramsCurrent[function_param] = \ + self.function_object.paramInstanceDefaults[function_param] + continue + self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + self.runtime_params_in_use = True + + # Otherwise, reset paramsCurrent to paramInstanceDefaults + elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: + # Can't do the following since function could still be a class ref rather than abound method (see below) + # self.paramsCurrent = self.paramInstanceDefaults + for param_name in self.user_params: + # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED + # At present, assignment of ``function`` as runtime param is not supported + # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; + # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) + if param_name is FUNCTION: + continue + if param_name is FUNCTION_PARAMS: + for function_param in self.function_object.user_params: + self.function_object.paramsCurrent[function_param] = \ + 
self.function_object.paramInstanceDefaults[function_param] + continue + self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + + self.runtime_params_in_use = False + + # If parameter_validation is set and they have changed, then validate requested values and assign to target_set + if self.prefs.paramValidationPref and params and not params is target_set: + try: + self._validate_params(variable=variable, request_set=params, target_set=target_set, context=context) + except TypeError: + self._validate_params(request_set=params, target_set=target_set, context=context) + + def function(self, + variable=None, + params=None, + time_scale=TimeScale.TRIAL, + context=None): + """ + Return: `previous_value ` combined with `rate ` and + `noise `. + + Arguments + --------- + + params : Optional[Dict[param keyword, param value]] + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters in + arguments of the constructor. + + time_scale : TimeScale : default TimeScale.TRIAL + specifies whether the function is executed on the time_step or trial time scale. + + Returns + ------- + + updated value of integral : 2d np.array + + """ + self._accumulator_check_args(variable, params=params, context=context) + + # rate = np.array(self.rate).astype(float) + # increment = self.increment + + if self.rate is None: + rate = 1.0 + else: + rate = self.rate + + if self.increment is None: + increment = 0.0 + else: + increment = self.increment + + # execute noise if it is a function + noise = self._try_execute_param(self.noise, variable) + + # try: + # previous_value = params[INITIALIZER] + # except (TypeError, KeyError): + + previous_value = np.atleast_2d(self.previous_value) + + value = previous_value * rate + noise + increment + + # If this NOT an initialization run, update the old value + # If it IS an initialization run, leave as is + # (don't want to count it as an execution step) + if not context or not INITIALIZING in context: + self.previous_value = value + return value # Note: For any of these that correspond to args, value must match the name of the corresponding arg in __init__() From 83d1083d76ba6c67b7ebde6d2f34369b2c6f4301 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 5 Sep 2017 11:57:59 -0400 Subject: [PATCH 25/69] adding pytest for FHN integrator function -- not working yet because we need to grab w value, not just v, from mechanism --- PsyNeuLink/Components/Functions/Function.py | 2 +- tests/mechanisms/test_integrator_mechanism.py | 25 ++++++++++++++++++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 571f4ce16ce..9b75736ae73 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4719,7 +4719,7 @@ def __init__(self, default_variable=None, initial_w = 0.0, initial_v = 0.0, - time_step_size = 1.0, + time_step_size = 0.1, t_0 = 0.0, a = 0.08, b = 0.7, diff --git a/tests/mechanisms/test_integrator_mechanism.py b/tests/mechanisms/test_integrator_mechanism.py index ab0f208878d..13c723b4dfb 100644 --- a/tests/mechanisms/test_integrator_mechanism.py +++ b/tests/mechanisms/test_integrator_mechanism.py @@ -2,7 +2,7 @@ import pytest from PsyNeuLink.Components.Functions.Function import AccumulatorIntegrator, ConstantIntegrator, NormalDist, \ - SimpleIntegrator + SimpleIntegrator, FHNIntegrator from 
PsyNeuLink.Components.Functions.Function import AdaptiveIntegrator, DriftDiffusionIntegrator, \ OrnsteinUhlenbeckIntegrator from PsyNeuLink.Components.Functions.Function import FunctionError @@ -645,3 +645,26 @@ def test_integrator_ornstein_uhlenbeck_noise_val(self): val = float(I.execute(10)) np.testing.assert_allclose(val, 15.010789523731438) + + +class TestFHN: + + def test_integrator_simple_noise_fn(self): + F = IntegratorMechanism( + name='IntegratorMech-FHNFunction', + function=FHNIntegrator( + + ) + ) + v_list = [] + w_list = [] + for i in range(15000): + v_list.append(F.execute(1)[0][0]) + v_list.append(F.execute(1)[0][1]) + # val = float(I.execute(10)) + # + # I.function_object.reset_initializer = 5.0 + # + # val2 = float(I.execute(0)) + # + # np.testing.assert_allclose(val, 4.022722120123589) From 913b18c3af602ae6cfdb7618670492247b9003e8 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Tue, 5 Sep 2017 13:31:51 -0400 Subject: [PATCH 26/69] cleaning up FHN integrator mechanism and adding a pytest to verify it --- PsyNeuLink/Components/Functions/Function.py | 585 +++++++++--------- PsyNeuLink/Globals/Keywords.py | 1 + tests/mechanisms/test_integrator_mechanism.py | 46 +- 3 files changed, 327 insertions(+), 305 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 9b75736ae73..dec659c77b8 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -200,7 +200,7 @@ from PsyNeuLink.Components.Component import Component, ComponentError, function_type, method_type, parameter_keywords from PsyNeuLink.Components.ShellClasses import Function -from PsyNeuLink.Globals.Keywords import ACCUMULATOR_INTEGRATOR_FUNCTION, ADAPTIVE_INTEGRATOR_FUNCTION, ALL, ANGLE, ARGUMENT_THERAPY_FUNCTION, AUTO_ASSIGN_MATRIX, AUTO_DEPENDENT, BACKPROPAGATION_FUNCTION, BETA, BIAS, COMBINATION_FUNCTION_TYPE, CONSTANT_INTEGRATOR_FUNCTION, CORRELATION, CROSS_ENTROPY, DECAY, DIFFERENCE, DISTANCE_FUNCTION, DISTANCE_METRICS, DIST_FUNCTION_TYPE, DIST_MEAN, DIST_SHAPE, DRIFT_DIFFUSION_INTEGRATOR_FUNCTION, ENERGY, ENTROPY, EUCLIDEAN, EXAMPLE_FUNCTION_TYPE, EXECUTING, EXPONENTIAL_DIST_FUNCTION, EXPONENTIAL_FUNCTION, EXPONENTS, FULL_CONNECTIVITY_MATRIX, FUNCTION, FUNCTION_OUTPUT_TYPE, FUNCTION_OUTPUT_TYPE_CONVERSION, FUNCTION_PARAMS, GAIN, GAMMA_DIST_FUNCTION, HIGH, HOLLOW_MATRIX, IDENTITY_MATRIX, INCREMENT, INITIALIZER, INITIALIZING, INPUT_STATES, INTEGRATOR_FUNCTION, INTEGRATOR_FUNCTION_TYPE, INTERCEPT, LEARNING_FUNCTION_TYPE, LINEAR_COMBINATION_FUNCTION, LINEAR_FUNCTION, LINEAR_MATRIX_FUNCTION, LOGISTIC_FUNCTION, LOW, MATRIX, MATRIX_KEYWORD_NAMES, MATRIX_KEYWORD_VALUES, MAX_INDICATOR, MAX_VAL, NOISE, NORMAL_DIST_FUNCTION, OBJECTIVE_FUNCTION_TYPE, OFFSET, OPERATION, ORNSTEIN_UHLENBECK_INTEGRATOR_FUNCTION, OUTPUT_STATES, OUTPUT_TYPE, PARAMETER_STATE_PARAMS, PEARSON, PROB, PRODUCT, RANDOM_CONNECTIVITY_MATRIX, RATE, RECEIVER, REDUCE_FUNCTION, RL_FUNCTION, SCALE, SIMPLE_INTEGRATOR_FUNCTION, SLOPE, SOFTMAX_FUNCTION, STABILITY_FUNCTION, STANDARD_DEVIATION, SUM, TIME_STEP_SIZE, TRANSFER_FUNCTION_TYPE, UNIFORM_DIST_FUNCTION, USER_DEFINED_FUNCTION, USER_DEFINED_FUNCTION_TYPE, WALD_DIST_FUNCTION, WEIGHTS, kwComponentCategory, kwPreferenceSetName +from PsyNeuLink.Globals.Keywords import FHN_INTEGRATOR_FUNCTION, ACCUMULATOR_INTEGRATOR_FUNCTION, ADAPTIVE_INTEGRATOR_FUNCTION, ALL, ANGLE, ARGUMENT_THERAPY_FUNCTION, AUTO_ASSIGN_MATRIX, AUTO_DEPENDENT, BACKPROPAGATION_FUNCTION, BETA, BIAS, COMBINATION_FUNCTION_TYPE, 
CONSTANT_INTEGRATOR_FUNCTION, CORRELATION, CROSS_ENTROPY, DECAY, DIFFERENCE, DISTANCE_FUNCTION, DISTANCE_METRICS, DIST_FUNCTION_TYPE, DIST_MEAN, DIST_SHAPE, DRIFT_DIFFUSION_INTEGRATOR_FUNCTION, ENERGY, ENTROPY, EUCLIDEAN, EXAMPLE_FUNCTION_TYPE, EXECUTING, EXPONENTIAL_DIST_FUNCTION, EXPONENTIAL_FUNCTION, EXPONENTS, FULL_CONNECTIVITY_MATRIX, FUNCTION, FUNCTION_OUTPUT_TYPE, FUNCTION_OUTPUT_TYPE_CONVERSION, FUNCTION_PARAMS, GAIN, GAMMA_DIST_FUNCTION, HIGH, HOLLOW_MATRIX, IDENTITY_MATRIX, INCREMENT, INITIALIZER, INITIALIZING, INPUT_STATES, INTEGRATOR_FUNCTION, INTEGRATOR_FUNCTION_TYPE, INTERCEPT, LEARNING_FUNCTION_TYPE, LINEAR_COMBINATION_FUNCTION, LINEAR_FUNCTION, LINEAR_MATRIX_FUNCTION, LOGISTIC_FUNCTION, LOW, MATRIX, MATRIX_KEYWORD_NAMES, MATRIX_KEYWORD_VALUES, MAX_INDICATOR, MAX_VAL, NOISE, NORMAL_DIST_FUNCTION, OBJECTIVE_FUNCTION_TYPE, OFFSET, OPERATION, ORNSTEIN_UHLENBECK_INTEGRATOR_FUNCTION, OUTPUT_STATES, OUTPUT_TYPE, PARAMETER_STATE_PARAMS, PEARSON, PROB, PRODUCT, RANDOM_CONNECTIVITY_MATRIX, RATE, RECEIVER, REDUCE_FUNCTION, RL_FUNCTION, SCALE, SIMPLE_INTEGRATOR_FUNCTION, SLOPE, SOFTMAX_FUNCTION, STABILITY_FUNCTION, STANDARD_DEVIATION, SUM, TIME_STEP_SIZE, TRANSFER_FUNCTION_TYPE, UNIFORM_DIST_FUNCTION, USER_DEFINED_FUNCTION, USER_DEFINED_FUNCTION_TYPE, WALD_DIST_FUNCTION, WEIGHTS, kwComponentCategory, kwPreferenceSetName from PsyNeuLink.Globals.Preferences.ComponentPreferenceSet import is_pref_set, kpReportOutputPref, kpRuntimeParamStickyAssignmentPref from PsyNeuLink.Globals.Preferences.PreferenceSet import PreferenceEntry, PreferenceLevel from PsyNeuLink.Globals.Registry import register_category @@ -3367,28 +3367,25 @@ def _try_execute_param(self, param, var): def _euler(self, previous_value, previous_time, slope, time_step_size): - if callable(self.slope): - slope = self.slope(previous_time, previous_value) - else: - slope = self.slope + if callable(slope): + slope = slope(previous_time, previous_value) return previous_value + slope*time_step_size def _runge_kutta_4(self, previous_value, previous_time, slope, time_step_size): - if callable(self.slope): - slope_approx_1 = self.slope(previous_time, + if callable(slope): + slope_approx_1 = slope(previous_time, previous_value) - slope_approx_2 = self.slope(previous_time + time_step_size/2, + slope_approx_2 = slope(previous_time + time_step_size/2, previous_value + (0.5 * time_step_size * slope_approx_1)) - slope_approx_3 = self.slope(previous_time + time_step_size/2, + slope_approx_3 = slope(previous_time + time_step_size/2, previous_value + (0.5 * time_step_size * slope_approx_2)) - slope_approx_4 = self.slope(previous_time + time_step_size, + slope_approx_4 = slope(previous_time + time_step_size, previous_value + (time_step_size * slope_approx_3)) value = previous_value \ + (time_step_size/6)*(slope_approx_1 + 2*(slope_approx_2 + slope_approx_3) + slope_approx_4) else: - slope = self.slope value = previous_value + time_step_size*slope return value @@ -4697,13 +4694,13 @@ class FHNIntegrator( """ - componentName = ACCUMULATOR_INTEGRATOR_FUNCTION + componentName = FHN_INTEGRATOR_FUNCTION class ClassDefaults(Integrator.ClassDefaults): variable = [[0]] paramClassDefaults = Function_Base.paramClassDefaults.copy() - # paramClassDefaults.update({INITIALIZER: ClassDefaults.variable}) + paramClassDefaults.update({INITIALIZER: ClassDefaults.variable}) paramClassDefaults.update({ NOISE: None, RATE: None, @@ -4716,7 +4713,7 @@ class ClassDefaults(Integrator.ClassDefaults): @tc.typecheck def __init__(self, - default_variable=None, + 
default_variable=1.0, initial_w = 0.0, initial_v = 0.0, time_step_size = 0.1, @@ -4730,7 +4727,8 @@ def __init__(self, context="FHNIntegrator Init"): # Assign args to params and functionParams dicts (kwConstants must == arg names) - params = self._assign_args_to_param_dicts(initial_v = initial_v, + params = self._assign_args_to_param_dicts(default_variable = default_variable, + initial_v = initial_v, initial_w = initial_w, time_step_size = time_step_size, t_0 = t_0, @@ -4739,20 +4737,23 @@ def __init__(self, c = c, params=params) - super().__init__( - # default_variable=default_variable, - params=params, - owner=owner, - prefs=prefs, - context=context) - self.previous_v = self.initial_v self.previous_w = self.initial_w self.previous_t = self.t_0 - self.instance_defaults.variable = self.initializer + super().__init__( + default_variable=default_variable, + params=params, + owner=owner, + prefs=prefs, + context=context) + self.variable = self.default_variable self.auto_dependent = True + + + + def function(self, variable=None, params=None, @@ -4805,325 +4806,325 @@ def dw_dt(time, w): return new_v, new_w - class AccumulatorIntegrator( - Integrator): # -------------------------------------------------------------------------------- - """ - AccumulatorIntegrator( \ - default_variable=None, \ - rate=1.0, \ - noise=0.0, \ - scale: parameter_spec = 1.0, \ - offset: parameter_spec = 0.0, \ - initializer, \ - params=None, \ - owner=None, \ - prefs=None, \ - ) - - .. _AccumulatorIntegrator: - - Integrates prior value by multiplying `previous_value ` by `rate - ` and adding `increment ` and `noise - `. Ignores `variable `). +class AccumulatorIntegrator( + Integrator): # -------------------------------------------------------------------------------- + """ + AccumulatorIntegrator( \ + default_variable=None, \ + rate=1.0, \ + noise=0.0, \ + scale: parameter_spec = 1.0, \ + offset: parameter_spec = 0.0, \ + initializer, \ + params=None, \ + owner=None, \ + prefs=None, \ + ) - Arguments - --------- + .. _AccumulatorIntegrator: - default_variable : number, list or np.array : default ClassDefaults.variable - specifies a template for the value to be integrated; if it is a list or array, each element is independently - integrated. + Integrates prior value by multiplying `previous_value ` by `rate + ` and adding `increment ` and `noise + `. Ignores `variable `). - rate : float, list or 1d np.array : default 1.0 - specifies the multiplicative decrement of `previous_value ` (i.e., - the rate of exponential decay). If it is a list or array, it must be the same length as - `variable `. + Arguments + --------- - increment : float, list or 1d np.array : default 0.0 - specifies an amount to be added to `prevous_value ` in each call to - `function ` (see `increment ` for details). - If it is a list or array, it must be the same length as `variable ` - (see `increment ` for details). + default_variable : number, list or np.array : default ClassDefaults.variable + specifies a template for the value to be integrated; if it is a list or array, each element is independently + integrated. - noise : float, PsyNeuLink Function, list or 1d np.array : default 0.0 - specifies random value to be added to `prevous_value ` in each call to - `function `. If it is a list or array, it must be the same length as - `variable ` (see `noise ` for details). + rate : float, list or 1d np.array : default 1.0 + specifies the multiplicative decrement of `previous_value ` (i.e., + the rate of exponential decay). 
If it is a list or array, it must be the same length as + `variable `. - initializer float, list or 1d np.array : default 0.0 - specifies starting value for integration. If it is a list or array, it must be the same length as - `default_variable ` (see `initializer - ` for details). + increment : float, list or 1d np.array : default 0.0 + specifies an amount to be added to `prevous_value ` in each call to + `function ` (see `increment ` for details). + If it is a list or array, it must be the same length as `variable ` + (see `increment ` for details). - params : Optional[Dict[param keyword, param value]] - a `parameter dictionary ` that specifies the parameters for the - function. Values specified for parameters in the dictionary override any assigned to those parameters in - arguments of the constructor. + noise : float, PsyNeuLink Function, list or 1d np.array : default 0.0 + specifies random value to be added to `prevous_value ` in each call to + `function `. If it is a list or array, it must be the same length as + `variable ` (see `noise ` for details). + + initializer float, list or 1d np.array : default 0.0 + specifies starting value for integration. If it is a list or array, it must be the same length as + `default_variable ` (see `initializer + ` for details). + + params : Optional[Dict[param keyword, param value]] + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters in + arguments of the constructor. + + owner : Component + `component ` to which to assign the Function. + + prefs : Optional[PreferenceSet or specification dict : Function.classPreferences] + the `PreferenceSet` for the Function. If it is not specified, a default is assigned using `classPreferences` + defined in __init__.py (see :doc:`PreferenceSet ` for details). + + + Attributes + ---------- + + variable : number or np.array + **Ignored** by the AccumulatorIntegrator function. Refer to SimpleIntegrator or AdaptiveIntegrator for + integrator functions that depend on both a prior value and a new value (variable). + + rate : float or 1d np.array + determines the multiplicative decrement of `previous_value ` (i.e., the + rate of exponential decay) in each call to `function `. If it is a list or + array, it must be the same length as `variable ` and each element is + used to multiply the corresponding element of `previous_value ` (i.e., + it is used for Hadamard multiplication). If it is a scalar or has a single element, its value is used to + multiply all the elements of `previous_value `. + + increment : float, function, list, or 1d np.array + determines the amount added to `previous_value ` in each call to + `function `. If it is a list or array, it must be the same length as + `variable ` and each element is added to the corresponding element of + `previous_value ` (i.e., it is used for Hadamard addition). If it is a + scalar or has a single element, its value is added to all the elements of `previous_value + `. + + noise : float, function, list, or 1d np.array + determines a random value to be added in each call to `function `. + If it is a list or array, it must be the same length as `variable ` and + each element is added to the corresponding element of `previous_value ` + (i.e., it is used for Hadamard addition). If it is a scalar or has a single element, its value is added to all + the elements of `previous_value `. If it is a function, it will be + executed separately and added to each element. 
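To make the `rate`, `increment`, and `noise` parameters described above concrete, here is a minimal NumPy sketch of the accumulation rule they feed (the same `previous_value * rate + increment + noise` update that `AccumulatorIntegrator.function` applies further below). It is a standalone illustration rather than the PsyNeuLink API, and the callable passed as `noise` is a stand-in for a distribution Function::

    import numpy as np

    def accumulate(previous_value, rate=1.0, increment=0.0, noise=0.0):
        # Element-wise (Hadamard) update: decay the prior value, then add increment and noise.
        previous_value = np.atleast_1d(previous_value).astype(float)
        noise_val = noise() if callable(noise) else noise   # a callable is re-sampled on every call
        return previous_value * rate + increment + noise_val

    state = np.zeros(3)
    for _ in range(5):
        state = accumulate(state, rate=0.9, increment=0.5,
                           noise=lambda: np.random.normal(0.0, 0.1, 3))
    print(state)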
+ + .. note:: + + In order to generate random noise, a probability distribution function should be selected (see + `Distribution Functions ` for details), which will generate a new noise value from + its distribution on each execution. If noise is specified as a float or as a function with a fixed output, + then the noise will simply be an offset that remains the same across all executions. + + initializer : float, 1d np.array or list + determines the starting value for integration (i.e., the value to which `previous_value + ` is set. If initializer is a list or array, it must be the same length + as `variable `. + + TBI: - owner : Component - `component ` to which to assign the Function. + Initializer may be a function or list/array of functions. - prefs : Optional[PreferenceSet or specification dict : Function.classPreferences] - the `PreferenceSet` for the Function. If it is not specified, a default is assigned using `classPreferences` - defined in __init__.py (see :doc:`PreferenceSet ` for details). + If initializer is specified as a single float or function, while `variable ` is + a list or array, initializer will be applied to each variable element. In the case of an initializer function, + this means that the function will be executed separately for each variable element. + previous_value : 1d np.array : default ClassDefaults.variable + stores previous value to which `rate ` and `noise ` + will be added. - Attributes - ---------- + owner : Mechanism + `component ` to which the Function has been assigned. + + prefs : PreferenceSet or specification dict : Projection.classPreferences + the `PreferenceSet` for function. Specified in the **prefs** argument of the constructor for the function; + if it is not specified, a default is assigned using `classPreferences` defined in __init__.py + (see :doc:`PreferenceSet ` for details). - variable : number or np.array - **Ignored** by the AccumulatorIntegrator function. Refer to SimpleIntegrator or AdaptiveIntegrator for - integrator functions that depend on both a prior value and a new value (variable). + """ - rate : float or 1d np.array - determines the multiplicative decrement of `previous_value ` (i.e., the - rate of exponential decay) in each call to `function `. If it is a list or - array, it must be the same length as `variable ` and each element is - used to multiply the corresponding element of `previous_value ` (i.e., - it is used for Hadamard multiplication). If it is a scalar or has a single element, its value is used to - multiply all the elements of `previous_value `. + componentName = ACCUMULATOR_INTEGRATOR_FUNCTION - increment : float, function, list, or 1d np.array - determines the amount added to `previous_value ` in each call to - `function `. If it is a list or array, it must be the same length as - `variable ` and each element is added to the corresponding element of - `previous_value ` (i.e., it is used for Hadamard addition). If it is a - scalar or has a single element, its value is added to all the elements of `previous_value - `. + class ClassDefaults(Integrator.ClassDefaults): + variable = [[0]] - noise : float, function, list, or 1d np.array - determines a random value to be added in each call to `function `. - If it is a list or array, it must be the same length as `variable ` and - each element is added to the corresponding element of `previous_value ` - (i.e., it is used for Hadamard addition). If it is a scalar or has a single element, its value is added to all - the elements of `previous_value `. 
If it is a function, it will be - executed separately and added to each element. + paramClassDefaults = Function_Base.paramClassDefaults.copy() + # paramClassDefaults.update({INITIALIZER: ClassDefaults.variable}) + paramClassDefaults.update({ + NOISE: None, + RATE: None, + INCREMENT: None, + }) - .. note:: + # multiplicative param does not make sense in this case + multiplicative_param = RATE + additive_param = INCREMENT - In order to generate random noise, a probability distribution function should be selected (see - `Distribution Functions ` for details), which will generate a new noise value from - its distribution on each execution. If noise is specified as a float or as a function with a fixed output, - then the noise will simply be an offset that remains the same across all executions. + @tc.typecheck + def __init__(self, + default_variable=None, + # rate: parameter_spec = 1.0, + rate=None, + noise=0.0, + increment=None, + initializer=ClassDefaults.variable, + params: tc.optional(dict) = None, + owner=None, + prefs: is_pref_set = None, + context="AccumulatorIntegrator Init"): - initializer : float, 1d np.array or list - determines the starting value for integration (i.e., the value to which `previous_value - ` is set. If initializer is a list or array, it must be the same length - as `variable `. + # Assign args to params and functionParams dicts (kwConstants must == arg names) + params = self._assign_args_to_param_dicts(rate=rate, + initializer=initializer, + noise=noise, + increment=increment, + params=params) - TBI: + super().__init__( + # default_variable=default_variable, + params=params, + owner=owner, + prefs=prefs, + context=context) - Initializer may be a function or list/array of functions. + self.previous_value = self.initializer + self.instance_defaults.variable = self.initializer - If initializer is specified as a single float or function, while `variable ` is - a list or array, initializer will be applied to each variable element. In the case of an initializer function, - this means that the function will be executed separately for each variable element. + self.auto_dependent = True - previous_value : 1d np.array : default ClassDefaults.variable - stores previous value to which `rate ` and `noise ` - will be added. + def _accumulator_check_args(self, variable=None, params=None, target_set=None, context=None): + """validate params and assign any runtime params. - owner : Mechanism - `component ` to which the Function has been assigned. + Called by AccumulatorIntegrator to validate params + Validation can be suppressed by turning parameter_validation attribute off + target_set is a params dictionary to which params should be assigned; + otherwise, they are assigned to paramsCurrent; - prefs : PreferenceSet or specification dict : Projection.classPreferences - the `PreferenceSet` for function. Specified in the **prefs** argument of the constructor for the function; - if it is not specified, a default is assigned using `classPreferences` defined in __init__.py - (see :doc:`PreferenceSet ` for details). 
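A rough sketch (plain Python, not the class machinery above) of how `initializer` seeds `previous_value`, which each subsequent call then carries forward::

    import numpy as np

    class ToyAccumulator:
        """Toy stand-in for the pattern above: initializer seeds previous_value; each call updates it."""
        def __init__(self, initializer=0.0, rate=1.0, increment=0.0):
            self.previous_value = np.atleast_1d(initializer).astype(float)
            self.rate = rate
            self.increment = increment

        def __call__(self):
            value = self.previous_value * self.rate + self.increment
            self.previous_value = value      # carried over to the next call
            return value

    acc = ToyAccumulator(initializer=[5.0, 5.0], rate=0.5, increment=1.0)
    print(acc(), acc())   # [3.5 3.5] then [2.75 2.75]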
+ Does the following: + - assign runtime params to paramsCurrent + - validate params if PARAM_VALIDATION is set + :param params: (dict) - params to validate + :target_set: (dict) - set to which params should be assigned (default: self.paramsCurrent) + :return: """ - componentName = ACCUMULATOR_INTEGRATOR_FUNCTION - - class ClassDefaults(Integrator.ClassDefaults): - variable = [[0]] - - paramClassDefaults = Function_Base.paramClassDefaults.copy() - # paramClassDefaults.update({INITIALIZER: ClassDefaults.variable}) - paramClassDefaults.update({ - NOISE: None, - RATE: None, - INCREMENT: None, - }) - - # multiplicative param does not make sense in this case - multiplicative_param = RATE - additive_param = INCREMENT - - @tc.typecheck - def __init__(self, - default_variable=None, - # rate: parameter_spec = 1.0, - rate=None, - noise=0.0, - increment=None, - initializer=ClassDefaults.variable, - params: tc.optional(dict) = None, - owner=None, - prefs: is_pref_set = None, - context="AccumulatorIntegrator Init"): - - # Assign args to params and functionParams dicts (kwConstants must == arg names) - params = self._assign_args_to_param_dicts(rate=rate, - initializer=initializer, - noise=noise, - increment=increment, - params=params) - - super().__init__( - # default_variable=default_variable, - params=params, - owner=owner, - prefs=prefs, - context=context) - - self.previous_value = self.initializer - self.instance_defaults.variable = self.initializer - - self.auto_dependent = True - - def _accumulator_check_args(self, variable=None, params=None, target_set=None, context=None): - """validate params and assign any runtime params. - - Called by AccumulatorIntegrator to validate params - Validation can be suppressed by turning parameter_validation attribute off - target_set is a params dictionary to which params should be assigned; - otherwise, they are assigned to paramsCurrent; - - Does the following: - - assign runtime params to paramsCurrent - - validate params if PARAM_VALIDATION is set - - :param params: (dict) - params to validate - :target_set: (dict) - set to which params should be assigned (default: self.paramsCurrent) - :return: - """ - - # PARAMS ------------------------------------------------------------ - - # If target_set is not specified, use paramsCurrent - if target_set is None: - target_set = self.paramsCurrent - - # # MODIFIED 11/27/16 OLD: - # # If parameter_validation is set, the function was called with params, - # # and they have changed, then validate requested values and assign to target_set - # if self.prefs.paramValidationPref and params and not params is None and not params is target_set: - # # self._validate_params(params, target_set, context=FUNCTION_CHECK_ARGS) - # self._validate_params(request_set=params, target_set=target_set, context=context) - - # If params have been passed, treat as runtime params and assign to paramsCurrent - # (relabel params as runtime_params for clarity) - runtime_params = params - if runtime_params and runtime_params is not None: - for param_name in self.user_params: - # Ignore input_states and output_states -- they should not be modified during run - # IMPLEMENTATION NOTE: - # FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED: - # At present, assignment of ``function`` as runtime param is not supported - # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: - 
continue - # If param is specified in runtime_params, then assign it - if param_name in runtime_params: - self.paramsCurrent[param_name] = runtime_params[param_name] - # Otherwise, (re-)assign to paramInstanceDefaults - # this insures that any params that were assigned as runtime on last execution are reset here - # (unless they have been assigned another runtime value) - elif not self.runtimeParamStickyAssignmentPref: - if param_name is FUNCTION_PARAMS: - for function_param in self.function_object.user_params: - self.function_object.paramsCurrent[function_param] = \ - self.function_object.paramInstanceDefaults[function_param] - continue - self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - self.runtime_params_in_use = True - - # Otherwise, reset paramsCurrent to paramInstanceDefaults - elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: - # Can't do the following since function could still be a class ref rather than abound method (see below) - # self.paramsCurrent = self.paramInstanceDefaults - for param_name in self.user_params: - # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED - # At present, assignment of ``function`` as runtime param is not supported - # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; - # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) - if param_name is FUNCTION: - continue + # PARAMS ------------------------------------------------------------ + + # If target_set is not specified, use paramsCurrent + if target_set is None: + target_set = self.paramsCurrent + + # # MODIFIED 11/27/16 OLD: + # # If parameter_validation is set, the function was called with params, + # # and they have changed, then validate requested values and assign to target_set + # if self.prefs.paramValidationPref and params and not params is None and not params is target_set: + # # self._validate_params(params, target_set, context=FUNCTION_CHECK_ARGS) + # self._validate_params(request_set=params, target_set=target_set, context=context) + + # If params have been passed, treat as runtime params and assign to paramsCurrent + # (relabel params as runtime_params for clarity) + runtime_params = params + if runtime_params and runtime_params is not None: + for param_name in self.user_params: + # Ignore input_states and output_states -- they should not be modified during run + # IMPLEMENTATION NOTE: + # FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED: + # At present, assignment of ``function`` as runtime param is not supported + # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; + # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) + if param_name in {FUNCTION, INPUT_STATES, OUTPUT_STATES}: + continue + # If param is specified in runtime_params, then assign it + if param_name in runtime_params: + self.paramsCurrent[param_name] = runtime_params[param_name] + # Otherwise, (re-)assign to paramInstanceDefaults + # this insures that any params that were assigned as runtime on last execution are reset here + # (unless they have been assigned another runtime value) + elif not self.runtimeParamStickyAssignmentPref: if param_name is FUNCTION_PARAMS: for function_param in self.function_object.user_params: self.function_object.paramsCurrent[function_param] = \ self.function_object.paramInstanceDefaults[function_param] continue self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] + 
self.runtime_params_in_use = True + + # Otherwise, reset paramsCurrent to paramInstanceDefaults + elif self.runtime_params_in_use and not self.runtimeParamStickyAssignmentPref: + # Can't do the following since function could still be a class ref rather than abound method (see below) + # self.paramsCurrent = self.paramInstanceDefaults + for param_name in self.user_params: + # IMPLEMENTATION NOTE: FUNCTION_RUNTIME_PARAM_NOT_SUPPORTED + # At present, assignment of ``function`` as runtime param is not supported + # (this is because paramInstanceDefaults[FUNCTION] could be a class rather than an bound method; + # i.e., not yet instantiated; could be rectified by assignment in _instantiate_function) + if param_name is FUNCTION: + continue + if param_name is FUNCTION_PARAMS: + for function_param in self.function_object.user_params: + self.function_object.paramsCurrent[function_param] = \ + self.function_object.paramInstanceDefaults[function_param] + continue + self.paramsCurrent[param_name] = self.paramInstanceDefaults[param_name] - self.runtime_params_in_use = False + self.runtime_params_in_use = False - # If parameter_validation is set and they have changed, then validate requested values and assign to target_set - if self.prefs.paramValidationPref and params and not params is target_set: - try: - self._validate_params(variable=variable, request_set=params, target_set=target_set, context=context) - except TypeError: - self._validate_params(request_set=params, target_set=target_set, context=context) + # If parameter_validation is set and they have changed, then validate requested values and assign to target_set + if self.prefs.paramValidationPref and params and not params is target_set: + try: + self._validate_params(variable=variable, request_set=params, target_set=target_set, context=context) + except TypeError: + self._validate_params(request_set=params, target_set=target_set, context=context) - def function(self, - variable=None, - params=None, - time_scale=TimeScale.TRIAL, - context=None): - """ - Return: `previous_value ` combined with `rate ` and - `noise `. + def function(self, + variable=None, + params=None, + time_scale=TimeScale.TRIAL, + context=None): + """ + Return: `previous_value ` combined with `rate ` and + `noise `. - Arguments - --------- + Arguments + --------- - params : Optional[Dict[param keyword, param value]] - a `parameter dictionary ` that specifies the parameters for the - function. Values specified for parameters in the dictionary override any assigned to those parameters in - arguments of the constructor. + params : Optional[Dict[param keyword, param value]] + a `parameter dictionary ` that specifies the parameters for the + function. Values specified for parameters in the dictionary override any assigned to those parameters in + arguments of the constructor. - time_scale : TimeScale : default TimeScale.TRIAL - specifies whether the function is executed on the time_step or trial time scale. + time_scale : TimeScale : default TimeScale.TRIAL + specifies whether the function is executed on the time_step or trial time scale. 
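The runtime-parameter handling in `_accumulator_check_args` above reduces to an "apply for this call, then fall back to instance defaults unless the sticky preference is set" pattern. A minimal sketch of that pattern, using illustrative names rather than the actual PsyNeuLink attributes::

    def params_for_execution(instance_defaults, previous, runtime_params=None, sticky=False):
        """Choose the params for one execution: runtime values apply to this call only,
        unless sticky=True keeps them in place for subsequent calls."""
        params = dict(previous) if sticky else dict(instance_defaults)
        if runtime_params:
            params.update(runtime_params)     # overrides for this execution
        return params

    defaults = {'rate': 1.0, 'increment': 0.0}
    step1 = params_for_execution(defaults, defaults, runtime_params={'rate': 0.5})  # overridden this call
    step2 = params_for_execution(defaults, step1)                                   # reverts to rate=1.0
    print(step1, step2)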
- Returns - ------- + Returns + ------- - updated value of integral : 2d np.array + updated value of integral : 2d np.array - """ - self._accumulator_check_args(variable, params=params, context=context) + """ + self._accumulator_check_args(variable, params=params, context=context) - # rate = np.array(self.rate).astype(float) - # increment = self.increment + # rate = np.array(self.rate).astype(float) + # increment = self.increment - if self.rate is None: - rate = 1.0 - else: - rate = self.rate + if self.rate is None: + rate = 1.0 + else: + rate = self.rate - if self.increment is None: - increment = 0.0 - else: - increment = self.increment + if self.increment is None: + increment = 0.0 + else: + increment = self.increment - # execute noise if it is a function - noise = self._try_execute_param(self.noise, variable) + # execute noise if it is a function + noise = self._try_execute_param(self.noise, variable) - # try: - # previous_value = params[INITIALIZER] - # except (TypeError, KeyError): + # try: + # previous_value = params[INITIALIZER] + # except (TypeError, KeyError): - previous_value = np.atleast_2d(self.previous_value) + previous_value = np.atleast_2d(self.previous_value) - value = previous_value * rate + noise + increment + value = previous_value * rate + noise + increment - # If this NOT an initialization run, update the old value - # If it IS an initialization run, leave as is - # (don't want to count it as an execution step) - if not context or not INITIALIZING in context: - self.previous_value = value - return value + # If this NOT an initialization run, update the old value + # If it IS an initialization run, leave as is + # (don't want to count it as an execution step) + if not context or not INITIALIZING in context: + self.previous_value = value + return value # Note: For any of these that correspond to args, value must match the name of the corresponding arg in __init__() diff --git a/PsyNeuLink/Globals/Keywords.py b/PsyNeuLink/Globals/Keywords.py index 7c90b52f0f2..31a48606604 100644 --- a/PsyNeuLink/Globals/Keywords.py +++ b/PsyNeuLink/Globals/Keywords.py @@ -356,6 +356,7 @@ def _names(self): SIMPLE_INTEGRATOR_FUNCTION = "SimpleIntegrator Function" CONSTANT_INTEGRATOR_FUNCTION = "ConstantIntegrator Function" ACCUMULATOR_INTEGRATOR_FUNCTION = "AccumulatorIntegrator Function" +FHN_INTEGRATOR_FUNCTION = "FHNIntegrator Function" ACCUMULATOR_INTEGRATOR = "AccumulatorIntegrator" # (7/19/17 CW) added for MappingProjection.py ADAPTIVE_INTEGRATOR_FUNCTION = "AdaptiveIntegrator Function" DRIFT_DIFFUSION_INTEGRATOR_FUNCTION = "DriftDiffusionIntegrator Function" diff --git a/tests/mechanisms/test_integrator_mechanism.py b/tests/mechanisms/test_integrator_mechanism.py index 13c723b4dfb..a65de27db35 100644 --- a/tests/mechanisms/test_integrator_mechanism.py +++ b/tests/mechanisms/test_integrator_mechanism.py @@ -649,22 +649,42 @@ def test_integrator_ornstein_uhlenbeck_noise_val(self): class TestFHN: - def test_integrator_simple_noise_fn(self): + + def test_FHN(self): + F = IntegratorMechanism( name='IntegratorMech-FHNFunction', function=FHNIntegrator( ) ) - v_list = [] - w_list = [] - for i in range(15000): - v_list.append(F.execute(1)[0][0]) - v_list.append(F.execute(1)[0][1]) - # val = float(I.execute(10)) - # - # I.function_object.reset_initializer = 5.0 - # - # val2 = float(I.execute(0)) - # - # np.testing.assert_allclose(val, 4.022722120123589) + plot_v_list = [] + plot_w_list = [] + + expected_v_list = [] + expected_w_list = [] + stimulus = 1.0 + for i in range(10): + for j in 
range(10): + new_v = F.execute(stimulus)[0][0] + new_w = F.execute(stimulus)[1][0] + # ** uncomment the lines below if you want to view the plot: + # plot_v_list.append(new_v) + # plot_w_list.append(new_w) + expected_v_list.append(new_v) + expected_w_list.append(new_w) + + # ** uncomment the lines below if you want to view the plot: + # import matplotlib.pyplot as plt + # plt.plot(v_list) + # plt.plot(w_list) + # plt.show() + + np.testing.assert_allclose(expected_v_list, [1.9861589924245777, 1.9184159304279109, 1.7920107368145777, + 1.6651158106802393, 1.5360917598075965, 1.4019128309448776, + 1.2568420252868404, 1.08773745582042, 0.8541804646541804, + 0.34785588139530099]) + np.testing.assert_allclose(expected_w_list, [0.28713219302304327, 0.65355262255707869, 0.9581082373550347, + 1.2070585850028435, 1.4068978270680454, 1.5629844531368104, + 1.6793901854329185, 1.7583410650743645, 1.7981128658110572, + 1.7817328532815251]) From 18f89aa88482267d249d9799e344c33014dd96fa Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 6 Sep 2017 11:55:11 -0400 Subject: [PATCH 27/69] fully parameterizing the dv_dt equation in the FHNMechanism so that it accomodates the gilzenrat formulation --- PsyNeuLink/Components/Functions/Function.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index dec659c77b8..8a25ad8a8ac 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4718,9 +4718,16 @@ def __init__(self, initial_v = 0.0, time_step_size = 0.1, t_0 = 0.0, + v_a = -1/3, + v_b = 0.0, + v_c = 1.0, + v_d=0.0, + v_e=-1.0, + v_f=1.0, a = 0.08, b = 0.7, c =0.8, + tau = 1.0, params: tc.optional(dict) = None, owner=None, prefs: is_pref_set = None, @@ -4732,6 +4739,12 @@ def __init__(self, initial_w = initial_w, time_step_size = time_step_size, t_0 = t_0, + v_a = v_a, + v_b = v_b, + v_c = v_c, + v_d=v_d, + v_e=v_e, + v_f=v_f, a = a, b = b, c = c, @@ -4784,8 +4797,8 @@ def function(self, variable = self.variable def dv_dt(time, v): - return v - (v**3)/3 - self.previous_w + variable - + # return v - (v**3)/3 - self.previous_w + variable + return (self.v_a*v**3 + self.v_b*v**2 + self.v_c*v + self.v_d + self.v_e*self.previous_w + self.v_f*variable)/self.v_time_constant def dw_dt(time, w): return self.a*(self.previous_v + self.b - self.c*w) From 2f1e4b22abaec9df2d892364b8ef75b49a2ad387 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 6 Sep 2017 13:27:56 -0400 Subject: [PATCH 28/69] parameterizing dw/dt and setting defaults; adding scale and offset to both return values --- PsyNeuLink/Components/Functions/Function.py | 65 +++++++++++++-------- 1 file changed, 40 insertions(+), 25 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 8a25ad8a8ac..888fb21793a 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4714,20 +4714,23 @@ class ClassDefaults(Integrator.ClassDefaults): @tc.typecheck def __init__(self, default_variable=1.0, - initial_w = 0.0, - initial_v = 0.0, - time_step_size = 0.1, - t_0 = 0.0, - v_a = -1/3, - v_b = 0.0, - v_c = 1.0, + offset=0.0, + scale=1.0, + initial_w=0.0, + initial_v=0.0, + time_step_size=0.1, + t_0=0.0, + v_a=-1/3, + v_b=0.0, + v_c=1.0, v_d=0.0, v_e=-1.0, v_f=1.0, - a = 0.08, - b = 0.7, - c =0.8, - tau = 1.0, + v_time_constant=1.0, + w_a=1.0, + w_b=-0.8, + w_c=0.7, + 
w_time_constant = 12.5, params: tc.optional(dict) = None, owner=None, prefs: is_pref_set = None, @@ -4735,19 +4738,23 @@ def __init__(self, # Assign args to params and functionParams dicts (kwConstants must == arg names) params = self._assign_args_to_param_dicts(default_variable = default_variable, - initial_v = initial_v, - initial_w = initial_w, - time_step_size = time_step_size, - t_0 = t_0, - v_a = v_a, - v_b = v_b, - v_c = v_c, + offset=offset, + scale=scale, + initial_v=initial_v, + initial_w=initial_w, + time_step_size=time_step_size, + t_0=t_0, + v_a=v_a, + v_b=v_b, + v_c=v_c, v_d=v_d, v_e=v_e, v_f=v_f, - a = a, - b = b, - c = c, + v_time_constant=v_time_constant, + w_a=w_a, + w_b=w_b, + w_c=w_c, + w_time_constant=w_time_constant, params=params) self.previous_v = self.initial_v @@ -4797,20 +4804,28 @@ def function(self, variable = self.variable def dv_dt(time, v): + # standard coeffs: # return v - (v**3)/3 - self.previous_w + variable - return (self.v_a*v**3 + self.v_b*v**2 + self.v_c*v + self.v_d + self.v_e*self.previous_w + self.v_f*variable)/self.v_time_constant + + # general: + return (self.v_a*v**3 + self.v_b*v**2 + self.v_c*v + self.v_d + + self.v_e*self.previous_w + self.v_f*variable)/self.v_time_constant def dw_dt(time, w): - return self.a*(self.previous_v + self.b - self.c*w) + # standard coeffs: + # return self.a*(self.previous_v + self.b - self.c*w) + + # general: + return (self.w_a*self.previous_v + self.w_b*w + self.w_c)/self.w_time_constant new_v = self._runge_kutta_4(previous_time=self.previous_t, previous_value=self.previous_v, slope=dv_dt, - time_step_size=self.time_step_size) + time_step_size=self.time_step_size)*self.scale + self.offset new_w = self._runge_kutta_4(previous_time=self.previous_t, previous_value=self.previous_w, slope=dw_dt, - time_step_size=self.time_step_size) + time_step_size=self.time_step_size)*self.scale + self.offset if not context or INITIALIZING not in context: self.previous_v = new_v From 1d722e4d9f790e5f6158964f2a6d85605d3f309f Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 6 Sep 2017 13:50:09 -0400 Subject: [PATCH 29/69] cleaning up and documenting FHN integrator --- PsyNeuLink/Components/Functions/Function.py | 115 ++++++++------------ 1 file changed, 45 insertions(+), 70 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 888fb21793a..bbed8e2171a 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4576,7 +4576,21 @@ class FHNIntegrator( noise=0.0, \ scale: parameter_spec = 1.0, \ offset: parameter_spec = 0.0, \ - initializer, \ + initial_w=0.0, \ + initial_v=0.0, \ + time_step_size=0.1, \ + t_0=0.0, \ + a_v=-1/3, \ + b_v=0.0, \ + c_v=1.0, \ + d_v=0.0, \ + e_v=-1.0, \ + f_v=1.0, \ + time_constant_v=1.0, \ + a_w=1.0, \ + b_w=-0.8, \ + c_w=0.7, \ + time_constant_w = 12.5, \ params=None, \ owner=None, \ prefs=None, \ @@ -4584,9 +4598,12 @@ class FHNIntegrator( .. _FHNIntegrator: - Integrates prior value by multiplying `previous_value ` by `rate - ` and adding `increment ` and `noise - `. Ignores `variable `). + Implements the Fitzhugh-Nagumo model using the 4th order Runge Kutta method of numerical integration. 
The model is + defined by a system of differential equations: dv/dt and dw/dt, which are parameterized as follows: + + time_constant_v * dv/dt = a_v * v^3 + b_v * v^2 + c_v*v^2 + d_v + e_v * w + f_v * I_ext + + time_constant_w * dw/dt = a_w * v + b_w * w + c_w Arguments --------- @@ -4595,27 +4612,19 @@ class FHNIntegrator( specifies a template for the value to be integrated; if it is a list or array, each element is independently integrated. - rate : float, list or 1d np.array : default 1.0 - specifies the multiplicative decrement of `previous_value ` (i.e., - the rate of exponential decay). If it is a list or array, it must be the same length as - `variable `. - - increment : float, list or 1d np.array : default 0.0 - specifies an amount to be added to `prevous_value ` in each call to - `function ` (see `increment ` for details). - If it is a list or array, it must be the same length as `variable ` - (see `increment ` for details). - - noise : float, PsyNeuLink Function, list or 1d np.array : default 0.0 - specifies random value to be added to `prevous_value ` in each call to - `function `. If it is a list or array, it must be the same length as - `variable ` (see `noise ` for details). + initial_w : float, list or 1d np.array : default 0.0 + specifies starting value for integration of dw/dt. If it is a list or array, it must be the same length as + `default_variable ` (see `initializer + ` for details). - initializer float, list or 1d np.array : default 0.0 - specifies starting value for integration. If it is a list or array, it must be the same length as + initial_v : float, list or 1d np.array : default 0.0 + specifies starting value for integration of dv/dt. If it is a list or array, it must be the same length as `default_variable ` (see `initializer ` for details). + t_0 : float : default 0.0 + specifies starting value for time + params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that specifies the parameters for the function. Values specified for parameters in the dictionary override any assigned to those parameters in @@ -4633,56 +4642,17 @@ class FHNIntegrator( ---------- variable : number or np.array - **Ignored** by the FHNIntegrator function. Refer to SimpleIntegrator or AdaptiveIntegrator for - integrator functions that depend on both a prior value and a new value (variable). + External stimulus - rate : float or 1d np.array - determines the multiplicative decrement of `previous_value ` (i.e., the - rate of exponential decay) in each call to `function `. If it is a list or - array, it must be the same length as `variable ` and each element is - used to multiply the corresponding element of `previous_value ` (i.e., - it is used for Hadamard multiplication). If it is a scalar or has a single element, its value is used to - multiply all the elements of `previous_value `. - increment : float, function, list, or 1d np.array - determines the amount added to `previous_value ` in each call to - `function `. If it is a list or array, it must be the same length as - `variable ` and each element is added to the corresponding element of - `previous_value ` (i.e., it is used for Hadamard addition). If it is a - scalar or has a single element, its value is added to all the elements of `previous_value - `. + previous_v : 1d np.array : default ClassDefaults.variable + stores accumulated value of v during integration - noise : float, function, list, or 1d np.array - determines a random value to be added in each call to `function `. 
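As a standalone check of the parameterization documented above, the sketch below advances v and w with a classic 4th-order Runge-Kutta step, holding the other state variable at its previous value the way the FHNIntegrator code does. It is an illustration under those assumptions (default coefficients taken from the constructor above), not the library implementation::

    def rk4_step(f, t, y, dt):
        """Classic 4th-order Runge-Kutta step for dy/dt = f(t, y)."""
        k1 = f(t, y)
        k2 = f(t + dt / 2, y + dt * k1 / 2)
        k3 = f(t + dt / 2, y + dt * k2 / 2)
        k4 = f(t + dt, y + dt * k3)
        return y + (dt / 6) * (k1 + 2 * k2 + 2 * k3 + k4)

    def fhn_step(v, w, t, stim, dt=0.1,
                 a_v=-1/3, b_v=0.0, c_v=1.0, d_v=0.0, e_v=-1.0, f_v=1.0, tau_v=1.0,
                 a_w=1.0, b_w=-0.8, c_w=0.7, tau_w=12.5):
        # dv/dt and dw/dt with the general coefficients; each variable is advanced while
        # the other is held at its previous value, mirroring the implementation above.
        dv = lambda _t, v_: (a_v * v_**3 + b_v * v_**2 + c_v * v_ + d_v + e_v * w + f_v * stim) / tau_v
        dw = lambda _t, w_: (a_w * v + b_w * w_ + c_w) / tau_w
        return rk4_step(dv, t, v, dt), rk4_step(dw, t, w, dt), t + dt

    v, w, t = 0.0, 0.0, 0.0
    for _ in range(100):
        v, w, t = fhn_step(v, w, t, stim=1.0)
    print(v, w)   # state after 100 steps of constant external input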
- If it is a list or array, it must be the same length as `variable ` and - each element is added to the corresponding element of `previous_value ` - (i.e., it is used for Hadamard addition). If it is a scalar or has a single element, its value is added to all - the elements of `previous_value `. If it is a function, it will be - executed separately and added to each element. + previous_w : 1d np.array : default ClassDefaults.variable + stores accumulated value of w during integration - .. note:: - - In order to generate random noise, a probability distribution function should be selected (see - `Distribution Functions ` for details), which will generate a new noise value from - its distribution on each execution. If noise is specified as a float or as a function with a fixed output, - then the noise will simply be an offset that remains the same across all executions. - - initializer : float, 1d np.array or list - determines the starting value for integration (i.e., the value to which `previous_value - ` is set. If initializer is a list or array, it must be the same length - as `variable `. - - TBI: - - Initializer may be a function or list/array of functions. - - If initializer is specified as a single float or function, while `variable ` is - a list or array, initializer will be applied to each variable element. In the case of an initializer function, - this means that the function will be executed separately for each variable element. - - previous_value : 1d np.array : default ClassDefaults.variable - stores previous value to which `rate ` and `noise ` - will be added. + previous_t : float + stores accumulated value of time, which is incremented by time_step_size on each execution of the function owner : Mechanism `component ` to which the Function has been assigned. @@ -4780,8 +4750,13 @@ def function(self, time_scale=TimeScale.TRIAL, context=None): """ - Return: `previous_value ` combined with `rate ` and - `noise `. 
+ Return: previous_v, previous_w at each time step, which represent the numerical integration of the following + system of differential equations: + + time_constant_v * dv/dt = a_v * v^3 + b_v * v^2 + c_v*v + d_v + e_v * w + f_v * I_ext + + time_constant_w * dw/dt = a_w * v + b_w * w + c_w + Arguments --------- @@ -4797,7 +4772,7 @@ def function, Returns ------- - updated value of integral : 2d np.array + previous_v, previous_w """ From 1b549b9544a408e824d22c9872e9482745daf1ff Mon Sep 17 00:00:00 2001 From: kmantel Date: Wed, 6 Sep 2017 15:54:33 -0400 Subject: [PATCH 30/69] correct bug where keyword is named paramsInstanceDefaults and in some cases would search for attr by the same name (instead of correct nonplural) --- .../ModulatoryProjections/ControlProjection.py | 2 +- .../ModulatoryProjections/GatingProjection.py | 2 +- .../PathwayProjections/MappingProjection.py | 2 +- PsyNeuLink/Components/Projections/Projection.py | 2 +- PsyNeuLink/Components/States/InputState.py | 2 +- .../States/ModulatorySignals/ControlSignal.py | 17 ++++++----------- .../States/ModulatorySignals/GatingSignal.py | 2 +- .../States/ModulatorySignals/LearningSignal.py | 2 +- PsyNeuLink/Components/States/OutputState.py | 2 +- PsyNeuLink/Components/States/ParameterState.py | 4 ++-- PsyNeuLink/Globals/Keywords.py | 6 +++--- 11 files changed, 19 insertions(+), 24 deletions(-) diff --git a/PsyNeuLink/Components/Projections/ModulatoryProjections/ControlProjection.py b/PsyNeuLink/Components/Projections/ModulatoryProjections/ControlProjection.py index 067eab0ee92..2cd8f1e49ed 100644 --- a/PsyNeuLink/Components/Projections/ModulatoryProjections/ControlProjection.py +++ b/PsyNeuLink/Components/Projections/ModulatoryProjections/ControlProjection.py @@ -282,7 +282,7 @@ def __init__(self, receiver is None or receiver.init_status is InitStatus.DEFERRED_INITIALIZATION): self.init_status = InitStatus.DEFERRED_INITIALIZATION - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults # Note: pass name of mechanism (to override assignment of componentName in super.__init__) # super(ControlSignal_Base, self).__init__(sender=sender, super(ControlProjection, self).__init__(sender=sender, diff --git a/PsyNeuLink/Components/Projections/ModulatoryProjections/GatingProjection.py b/PsyNeuLink/Components/Projections/ModulatoryProjections/GatingProjection.py index 52c9ce0fefd..4ede3ef23b6 100644 --- a/PsyNeuLink/Components/Projections/ModulatoryProjections/GatingProjection.py +++ b/PsyNeuLink/Components/Projections/ModulatoryProjections/GatingProjection.py @@ -266,7 +266,7 @@ def __init__(self, # Flag for deferred initialization self.init_status = InitStatus.DEFERRED_INITIALIZATION - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults # Note: pass name of mechanism (to override assignment of componentName in super.__init__) super().__init__(sender=sender, receiver=receiver, diff --git a/PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py b/PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py index 0bf8ead3137..8a1ac6f133d 100644 --- a/PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py +++ b/PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py @@ -424,7 +424,7 @@ def __init__(self, if
sender is None or receiver is None: self.init_status = InitStatus.DEFERRED_INITIALIZATION - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults super(MappingProjection, self).__init__(sender=sender, receiver=receiver, params=params, diff --git a/PsyNeuLink/Components/Projections/Projection.py b/PsyNeuLink/Components/Projections/Projection.py index fc98741203d..4a3e01af574 100644 --- a/PsyNeuLink/Components/Projections/Projection.py +++ b/PsyNeuLink/Components/Projections/Projection.py @@ -582,7 +582,7 @@ def __init__(self, # FIX: ?LEAVE IT TO _validate_variable, SINCE SENDER MAY NOT YET HAVE BEEN INSTANTIATED # MODIFIED 6/12/16: ADDED ASSIGNMENT ABOVE # (TO HANDLE INSTANTIATION OF DEFAULT ControlProjection SENDER -- BUT WHY ISN'T VALUE ESTABLISHED YET? - # Validate variable, function and params, and assign params to paramsInstanceDefaults + # Validate variable, function and params, and assign params to paramInstanceDefaults # Note: pass name of mechanism (to override assignment of componentName in super.__init__) super(Projection_Base, self).__init__(default_variable=variable, param_defaults=params, diff --git a/PsyNeuLink/Components/States/InputState.py b/PsyNeuLink/Components/States/InputState.py index 96d2d845dd5..bc64ff960ed 100644 --- a/PsyNeuLink/Components/States/InputState.py +++ b/PsyNeuLink/Components/States/InputState.py @@ -489,7 +489,7 @@ def __init__(self, self.reference_value = reference_value - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults # Note: pass name of owner (to override assignment of componentName in super.__init__) super(InputState, self).__init__(owner, variable=variable, diff --git a/PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py b/PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py index af4eca9da68..d3dea8db37d 100644 --- a/PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py +++ b/PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py @@ -277,6 +277,7 @@ import inspect import warnings + from enum import IntEnum import numpy as np @@ -286,24 +287,18 @@ # import Components # FIX: EVCMechanism IS IMPORTED HERE TO DEAL WITH COST FUNCTIONS THAT ARE DEFINED IN EVCMechanism # SHOULD THEY BE LIMITED TO EVC?? 
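A minimal, hypothetical illustration of the bug class addressed by [PATCH 30/69] above: a keyword constant that is used with getattr() has to spell the attribute name exactly (no stray plural "s")::

    class ToyComponent:
        def __init__(self):
            self.paramInstanceDefaults = {'rate': 1.0}

    PARAM_INSTANCE_DEFAULTS = "paramInstanceDefaults"     # matches the attribute
    MISSPELLED_KEYWORD = "paramsInstanceDefaults"         # extra 's' -> lookup fails

    c = ToyComponent()
    print(getattr(c, PARAM_INSTANCE_DEFAULTS))            # {'rate': 1.0}
    print(getattr(c, MISSPELLED_KEYWORD, '<not found>'))  # '<not found>'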
-from PsyNeuLink.Components.Functions.Function import CombinationFunction, Exponential, IntegratorFunction, Linear, \ - LinearCombination, Reduce, SimpleIntegrator, TransferFunction, _is_modulation_param, is_function_type +from PsyNeuLink.Components.Functions.Function import CombinationFunction, Exponential, IntegratorFunction, Linear, LinearCombination, Reduce, SimpleIntegrator, TransferFunction, _is_modulation_param, is_function_type from PsyNeuLink.Components.ShellClasses import Function from PsyNeuLink.Components.States.ModulatorySignals.ModulatorySignal import ModulatorySignal from PsyNeuLink.Components.States.OutputState import PRIMARY_OUTPUT_STATE from PsyNeuLink.Components.States.State import State_Base from PsyNeuLink.Globals.Defaults import defaultControlAllocation -from PsyNeuLink.Globals.Keywords import ALLOCATION_SAMPLES, AUTO, CONTROLLED_PARAM, CONTROL_PROJECTION, EXECUTING, \ - FUNCTION, FUNCTION_PARAMS, INTERCEPT, OFF, ON, OUTPUT_STATES, OUTPUT_STATE_PARAMS, PROJECTION_TYPE, SEPARATOR_BAR, \ - SLOPE, SUM, kwAssign +from PsyNeuLink.Globals.Keywords import ALLOCATION_SAMPLES, AUTO, CONTROLLED_PARAM, CONTROL_PROJECTION, EXECUTING, FUNCTION, FUNCTION_PARAMS, INTERCEPT, OFF, ON, OUTPUT_STATES, OUTPUT_STATE_PARAMS, PROJECTION_TYPE, SEPARATOR_BAR, SLOPE, SUM, kwAssign from PsyNeuLink.Globals.Log import LogEntry, LogLevel from PsyNeuLink.Globals.Preferences.ComponentPreferenceSet import is_pref_set from PsyNeuLink.Globals.Preferences.PreferenceSet import PreferenceLevel -from PsyNeuLink.Globals.Utilities import is_numeric, iscompatible, kwCompatibilityLength, kwCompatibilityNumeric, \ - kwCompatibilityType -from PsyNeuLink.Library.Mechanisms.AdaptiveMechanisms.ControlMechanisms.EVC.EVCMechanism import \ - ADJUSTMENT_COST_FUNCTION, COST_COMBINATION_FUNCTION, DURATION_COST_FUNCTION, INTENSITY_COST_FUNCTION, \ - costFunctionNames, kpAdjustmentCost, kpAllocation, kpCost, kpDurationCost, kpIntensity, kpIntensityCost +from PsyNeuLink.Globals.Utilities import is_numeric, iscompatible, kwCompatibilityLength, kwCompatibilityNumeric, kwCompatibilityType +from PsyNeuLink.Library.Mechanisms.AdaptiveMechanisms.ControlMechanisms.EVC.EVCMechanism import ADJUSTMENT_COST_FUNCTION, COST_COMBINATION_FUNCTION, DURATION_COST_FUNCTION, INTENSITY_COST_FUNCTION, costFunctionNames, kpAdjustmentCost, kpAllocation, kpCost, kpDurationCost, kpIntensity, kpIntensityCost from PsyNeuLink.Scheduling.TimeScale import CurrentTime, TimeScale # class OutputStateLog(IntEnum): @@ -635,7 +630,7 @@ def __init__(self, # Consider adding self to owner.outputStates here (and removing from ControlProjection._instantiate_sender) # (test for it, and create if necessary, as per OutputStates in ControlProjection._instantiate_sender), - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults super().__init__(owner=owner, reference_value=reference_value, variable=variable, diff --git a/PsyNeuLink/Components/States/ModulatorySignals/GatingSignal.py b/PsyNeuLink/Components/States/ModulatorySignals/GatingSignal.py index 276cee518aa..6537d052a26 100644 --- a/PsyNeuLink/Components/States/ModulatorySignals/GatingSignal.py +++ b/PsyNeuLink/Components/States/ModulatorySignals/GatingSignal.py @@ -421,7 +421,7 @@ def __init__(self, # Consider adding self to owner.outputStates here (and removing from GatingProjection._instantiate_sender) # (test for it, and create if necessary, as per OutputStates in 
GatingProjection._instantiate_sender), - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults super().__init__(owner=owner, reference_value=reference_value, variable=variable, diff --git a/PsyNeuLink/Components/States/ModulatorySignals/LearningSignal.py b/PsyNeuLink/Components/States/ModulatorySignals/LearningSignal.py index a7ab064df7f..578d55b7f34 100644 --- a/PsyNeuLink/Components/States/ModulatorySignals/LearningSignal.py +++ b/PsyNeuLink/Components/States/ModulatorySignals/LearningSignal.py @@ -372,7 +372,7 @@ def __init__(self, # Consider adding self to owner.outputStates here (and removing from LearningProjection._instantiate_sender) # (test for it, and create if necessary, as per OutputStates in LearningProjection._instantiate_sender), - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults super().__init__(owner=owner, reference_value=reference_value, variable=variable, diff --git a/PsyNeuLink/Components/States/OutputState.py b/PsyNeuLink/Components/States/OutputState.py index 74620c7970d..3161b5ca441 100644 --- a/PsyNeuLink/Components/States/OutputState.py +++ b/PsyNeuLink/Components/States/OutputState.py @@ -609,7 +609,7 @@ def __init__(self, # Consider adding self to owner.outputStates here (and removing from ControlProjection._instantiate_sender) # (test for it, and create if necessary, as per OutputStates in ControlProjection._instantiate_sender), - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults super().__init__(owner, variable=variable, size=size, diff --git a/PsyNeuLink/Components/States/ParameterState.py b/PsyNeuLink/Components/States/ParameterState.py index 67672ed60ee..fe3759bc69e 100644 --- a/PsyNeuLink/Components/States/ParameterState.py +++ b/PsyNeuLink/Components/States/ParameterState.py @@ -253,8 +253,8 @@ import inspect -import typecheck as tc import numpy as np +import typecheck as tc from PsyNeuLink.Components.Component import Component, function_type, method_type, parameter_keywords from PsyNeuLink.Components.Functions.Function import Linear, get_param_value_for_keyword @@ -456,7 +456,7 @@ def __init__(self, self.reference_value = reference_value - # Validate sender (as variable) and params, and assign to variable and paramsInstanceDefaults + # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults # Note: pass name of Mechanism (to override assignment of componentName in super.__init__) super(ParameterState, self).__init__(owner, variable=variable, diff --git a/PsyNeuLink/Globals/Keywords.py b/PsyNeuLink/Globals/Keywords.py index 7c90b52f0f2..f921b3900b4 100644 --- a/PsyNeuLink/Globals/Keywords.py +++ b/PsyNeuLink/Globals/Keywords.py @@ -95,11 +95,11 @@ class MatrixKeywords: ---------- IDENTITY_MATRIX - a square matrix of 1's along the diagnoal, 0's elsewhere; this requires that the length of the sender and + a square matrix of 1's along the diagnoal, 0's elsewhere; this requires that the length of the sender and receiver values are the same. 
HOLLOW_MATRIX - a square matrix of 0's along the diagnoal, 1's elsewhere; this requires that the length of the sender and + a square matrix of 0's along the diagnoal, 1's elsewhere; this requires that the length of the sender and receiver values are the same. FULL_CONNECTIVITY_MATRIX @@ -266,7 +266,7 @@ def _names(self): FUNCTION_PARAMS = "function_params" # Params used to instantiate or assign to a FUNCTION PARAM_CLASS_DEFAULTS = "paramClassDefaults" # "Factory" default params for a Function -PARAM_INSTANCE_DEFAULTS = "paramsInstanceDefaults" # Params used to instantiate a Function; supercede paramClassDefaults +PARAM_INSTANCE_DEFAULTS = "paramInstanceDefaults" # Params used to instantiate a Function; supercede paramClassDefaults PARAMS_CURRENT = "paramsCurrent" # Params currently in effect for an instance of a Function # in general, this includes params specifed as arg in a # to Function.execute; however, there are some exceptions From a042f0a5a0bf7681434e7ae83cc09e8d893444c2 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Wed, 6 Sep 2017 18:01:40 -0400 Subject: [PATCH 31/69] cleaning up argument/param names in FHN function, tried adding a pytest to test how it translates to the form in the Gilzenrat paper, but it's not working --- PsyNeuLink/Components/Functions/Function.py | 58 +++++++++-------- tests/mechanisms/test_integrator_mechanism.py | 62 ++++++++++++++++++- 2 files changed, 91 insertions(+), 29 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index bbed8e2171a..b613c061bcf 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -3377,14 +3377,19 @@ def _runge_kutta_4(self, previous_value, previous_time, slope, time_step_size): if callable(slope): slope_approx_1 = slope(previous_time, previous_value) + slope_approx_2 = slope(previous_time + time_step_size/2, previous_value + (0.5 * time_step_size * slope_approx_1)) + slope_approx_3 = slope(previous_time + time_step_size/2, previous_value + (0.5 * time_step_size * slope_approx_2)) + slope_approx_4 = slope(previous_time + time_step_size, previous_value + (time_step_size * slope_approx_3)) + value = previous_value \ + (time_step_size/6)*(slope_approx_1 + 2*(slope_approx_2 + slope_approx_3) + slope_approx_4) + else: value = previous_value + time_step_size*slope @@ -4690,41 +4695,41 @@ def __init__(self, initial_v=0.0, time_step_size=0.1, t_0=0.0, - v_a=-1/3, - v_b=0.0, - v_c=1.0, - v_d=0.0, - v_e=-1.0, - v_f=1.0, - v_time_constant=1.0, - w_a=1.0, - w_b=-0.8, - w_c=0.7, - w_time_constant = 12.5, + a_v=-1/3, + b_v=0.0, + c_v=1.0, + d_v=0.0, + e_v=-1.0, + f_v=1.0, + time_constant_v=1.0, + a_w=1.0, + b_w=-0.8, + c_w=0.7, + time_constant_w = 12.5, params: tc.optional(dict) = None, owner=None, prefs: is_pref_set = None, context="FHNIntegrator Init"): # Assign args to params and functionParams dicts (kwConstants must == arg names) - params = self._assign_args_to_param_dicts(default_variable = default_variable, + params = self._assign_args_to_param_dicts(default_variable=default_variable, offset=offset, scale=scale, initial_v=initial_v, initial_w=initial_w, time_step_size=time_step_size, t_0=t_0, - v_a=v_a, - v_b=v_b, - v_c=v_c, - v_d=v_d, - v_e=v_e, - v_f=v_f, - v_time_constant=v_time_constant, - w_a=w_a, - w_b=w_b, - w_c=w_c, - w_time_constant=w_time_constant, + a_v=a_v, + b_v=b_v, + c_v=c_v, + d_v=d_v, + e_v=e_v, + f_v=f_v, + time_constant_v=time_constant_v, + a_w=a_w, + b_w=b_w, + c_w=c_w, + 
time_constant_w=time_constant_w, params=params) self.previous_v = self.initial_v @@ -4783,14 +4788,15 @@ def dv_dt(time, v): # return v - (v**3)/3 - self.previous_w + variable # general: - return (self.v_a*v**3 + self.v_b*v**2 + self.v_c*v + self.v_d - + self.v_e*self.previous_w + self.v_f*variable)/self.v_time_constant + val= (self.a_v*(v**3) + self.b_v*(v**2) + self.c_v*v + self.d_v + + self.e_v*self.previous_w + self.f_v*variable)/self.time_constant_v + return val def dw_dt(time, w): # standard coeffs: # return self.a*(self.previous_v + self.b - self.c*w) # general: - return (self.w_a*self.previous_v + self.w_b*w + self.w_c)/self.w_time_constant + return (self.a_w*self.previous_v + self.b_w*w + self.c_w)/self.time_constant_w new_v = self._runge_kutta_4(previous_time=self.previous_t, previous_value=self.previous_v, diff --git a/tests/mechanisms/test_integrator_mechanism.py b/tests/mechanisms/test_integrator_mechanism.py index a65de27db35..21f3e3912bf 100644 --- a/tests/mechanisms/test_integrator_mechanism.py +++ b/tests/mechanisms/test_integrator_mechanism.py @@ -650,7 +650,7 @@ def test_integrator_ornstein_uhlenbeck_noise_val(self): class TestFHN: - def test_FHN(self): + def test_FHN_defaults(self): F = IntegratorMechanism( name='IntegratorMech-FHNFunction', @@ -676,8 +676,8 @@ def test_FHN(self): # ** uncomment the lines below if you want to view the plot: # import matplotlib.pyplot as plt - # plt.plot(v_list) - # plt.plot(w_list) + # plt.plot(plot_v_list) + # plt.plot(plot_w_list) # plt.show() np.testing.assert_allclose(expected_v_list, [1.9861589924245777, 1.9184159304279109, 1.7920107368145777, @@ -688,3 +688,59 @@ def test_FHN(self): 1.2070585850028435, 1.4068978270680454, 1.5629844531368104, 1.6793901854329185, 1.7583410650743645, 1.7981128658110572, 1.7817328532815251]) + + # def test_FHN_gilzenrat(self): + # + # F = IntegratorMechanism( + # name='IntegratorMech-FHNFunction', + # function=FHNIntegrator( + # time_step_size=0.0001, + # initial_v=0.2, + # initial_w=0.0, + # t_0=0.0, + # time_constant_v=1.0, + # a_v=-1.0, + # b_v=1.5, + # c_v=-0.5, + # d_v=0.0, + # e_v=-1.0, + # f_v=0.0, + # time_constant_w=100.0, + # a_w=1.0, + # b_w=-0.5, + # c_w=0.0 + # ) + # ) + # plot_v_list = [] + # plot_w_list = [] + # + # expected_v_list = [] + # expected_w_list = [] + # stimulus = 0.0 + # for i in range(10): + # + # for j in range(200): + # new_v = F.execute(stimulus)[0][0] + # new_w = F.execute(stimulus)[1][0] + # # ** uncomment the lines below if you want to view the plot: + # plot_v_list.append(new_v) + # plot_w_list.append(new_w) + # expected_v_list.append(new_v) + # expected_w_list.append(new_w) + # # print(plot_v_list) + # # print(plot_w_list) + # # ** uncomment the lines below if you want to view the plot: + # import matplotlib.pyplot as plt + # plt.plot(plot_v_list) + # plt.plot(plot_w_list) + # plt.show() + # + # # np.testing.assert_allclose(expected_v_list, [1.9861589924245777, 1.9184159304279109, 1.7920107368145777, + # # 1.6651158106802393, 1.5360917598075965, 1.4019128309448776, + # # 1.2568420252868404, 1.08773745582042, 0.8541804646541804, + # # 0.34785588139530099]) + # # np.testing.assert_allclose(expected_w_list, [0.28713219302304327, 0.65355262255707869, 0.9581082373550347, + # # 1.2070585850028435, 1.4068978270680454, 1.5629844531368104, + # # 1.6793901854329185, 1.7583410650743645, 1.7981128658110572, + # # 1.7817328532815251]) + # # From ab0962ce8534dca607ff834db9ff40e7f8897007 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 7 Sep 2017 14:25:26 -0400 Subject: 
[PATCH 32/69] adding 'uncorellated_activity' and 'electrotonic_coupling' arguments to be used by the dw_dt equation - this allows for a more direct comparison with the gilzenrat paper --- PsyNeuLink/Components/Functions/Function.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index b613c061bcf..2eac0cab8e2 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4595,6 +4595,8 @@ class FHNIntegrator( a_w=1.0, \ b_w=-0.8, \ c_w=0.7, \ + electrotonic_coupling=1.0, \ + uncorrelated_activity=0.0 \ time_constant_w = 12.5, \ params=None, \ owner=None, \ @@ -4706,6 +4708,8 @@ def __init__(self, b_w=-0.8, c_w=0.7, time_constant_w = 12.5, + electrotonic_coupling = 1.0, + uncorrelated_activity = 0.0, params: tc.optional(dict) = None, owner=None, prefs: is_pref_set = None, @@ -4729,6 +4733,8 @@ def __init__(self, a_w=a_w, b_w=b_w, c_w=c_w, + electrotonic_coupling=electrotonic_coupling, + uncorrelated_activity=uncorrelated_activity, time_constant_w=time_constant_w, params=params) @@ -4784,19 +4790,14 @@ def function(self, variable = self.variable def dv_dt(time, v): - # standard coeffs: - # return v - (v**3)/3 - self.previous_w + variable - # general: val= (self.a_v*(v**3) + self.b_v*(v**2) + self.c_v*v + self.d_v + self.e_v*self.previous_w + self.f_v*variable)/self.time_constant_v return val def dw_dt(time, w): - # standard coeffs: - # return self.a*(self.previous_v + self.b - self.c*w) - # general: - return (self.a_w*self.previous_v + self.b_w*w + self.c_w)/self.time_constant_w + return (self.electrotonic_coupling*self.a_w*self.previous_v + self.b_w*w + self.c_w + + (1-self.electrotonic_coupling)*self.uncorrelated_activity)/self.time_constant_w new_v = self._runge_kutta_4(previous_time=self.previous_t, previous_value=self.previous_v, From 668929576c01f9dd752b2d1729b4bd418e321894 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 7 Sep 2017 15:21:53 -0400 Subject: [PATCH 33/69] FHN pytests still not matching gilzenrat - commenting out --- tests/mechanisms/test_integrator_mechanism.py | 61 ++++++++++++++++++- 1 file changed, 59 insertions(+), 2 deletions(-) diff --git a/tests/mechanisms/test_integrator_mechanism.py b/tests/mechanisms/test_integrator_mechanism.py index 21f3e3912bf..8a75cec45e7 100644 --- a/tests/mechanisms/test_integrator_mechanism.py +++ b/tests/mechanisms/test_integrator_mechanism.py @@ -694,7 +694,7 @@ def test_FHN_defaults(self): # F = IntegratorMechanism( # name='IntegratorMech-FHNFunction', # function=FHNIntegrator( - # time_step_size=0.0001, + # time_step_size=0.1, # initial_v=0.2, # initial_w=0.0, # t_0=0.0, @@ -719,7 +719,64 @@ def test_FHN_defaults(self): # stimulus = 0.0 # for i in range(10): # - # for j in range(200): + # for j in range(50): + # new_v = F.execute(stimulus)[0][0] + # new_w = F.execute(stimulus)[1][0] + # # ** uncomment the lines below if you want to view the plot: + # plot_v_list.append(new_v) + # plot_w_list.append(new_w) + # expected_v_list.append(new_v) + # expected_w_list.append(new_w) + # # print(plot_v_list) + # # print(plot_w_list) + # # ** uncomment the lines below if you want to view the plot: + # import matplotlib.pyplot as plt + # plt.plot(plot_v_list) + # plt.plot(plot_w_list) + # plt.show() + # + # # np.testing.assert_allclose(expected_v_list, [1.9861589924245777, 1.9184159304279109, 1.7920107368145777, + # # 1.6651158106802393, 1.5360917598075965, 
1.4019128309448776, + # # 1.2568420252868404, 1.08773745582042, 0.8541804646541804, + # # 0.34785588139530099]) + # # np.testing.assert_allclose(expected_w_list, [0.28713219302304327, 0.65355262255707869, 0.9581082373550347, + # # 1.2070585850028435, 1.4068978270680454, 1.5629844531368104, + # # 1.6793901854329185, 1.7583410650743645, 1.7981128658110572, + # # 1.7817328532815251]) + # # + # + # def test_FHN_gilzenrat_low_electrotonic_coupling(self): + # + # F = IntegratorMechanism( + # name='IntegratorMech-FHNFunction', + # function=FHNIntegrator( + # time_step_size=0.1, + # initial_v=0.2, + # initial_w=0.0, + # t_0=0.0, + # time_constant_v=1.0, + # a_v=-1.0, + # b_v=0.5, + # c_v=0.5, + # d_v=0.0, + # e_v=-1.0, + # f_v=0.0, + # electrotonic_coupling=0.55, + # time_constant_w=100.0, + # a_w=1.0, + # b_w=-0.5, + # c_w=0.0 + # ) + # ) + # plot_v_list = [] + # plot_w_list = [] + # + # expected_v_list = [] + # expected_w_list = [] + # stimulus = 0.0 + # for i in range(10): + # + # for j in range(600): # new_v = F.execute(stimulus)[0][0] # new_w = F.execute(stimulus)[1][0] # # ** uncomment the lines below if you want to view the plot: From ffdcc88b71bb2967c7240df5dc10f99339928aa2 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 7 Sep 2017 16:05:11 -0400 Subject: [PATCH 34/69] removing typos and updating language in DDM docs (not linking to TIME_STEP and TRIAL anymore as these have different meanings in the scheduler) --- .../ProcessingMechanisms/IntegratorMechanisms/DDM.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/IntegratorMechanisms/DDM.py b/PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/IntegratorMechanisms/DDM.py index 5b3513d2013..7dc8182eb41 100644 --- a/PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/IntegratorMechanisms/DDM.py +++ b/PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/IntegratorMechanisms/DDM.py @@ -269,12 +269,15 @@ ` and `RESPONSE_TIME `, and assigns these as the first two items of its `value ` attribute, irrespective of its function. -When an `analytic ` function is selected, the same set of values is returned for every execution, -that are determined entirely by the set of parameters passed to its `function `; generally, this -corresponds to a `TRIAL` of execution. +When an `analytic ` function is selected, the same set of values is returned for every execution. +The returned values are determined entirely by the set of parameters passed to its `function `. When the `path integration `, function is selected, a single step of integration is conducted each -time the Mechanism is executed; generally, this corresponds to a `TIME_STEP` of execution. +time the Mechanism is executed. The returned values accumulate on every execution. + +The analytic functions return a final position and time of the model, along with other statistics, whereas the path +integration function returns intermediate position and time values. The two types of functions can be thought of as +operating on different time scales: trial (analytic) and time step (path integration). ..
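A minimal standalone sketch of the contrast described above (plain NumPy, independent of the DDM class and its actual function signatures; the closed-form error rate and decision time are the standard expressions for an unbiased diffusion, shown only for illustration, and the stepwise update is a simple Euler accumulation, one call per execution)::

    import numpy as np

    def analytic_trial(drift_rate, threshold, noise):
        # one call returns trial-level statistics directly from closed-form
        # expressions; nothing accumulates across executions
        index = drift_rate * threshold / noise ** 2
        error_rate = 1.0 / (1.0 + np.exp(2.0 * index))
        decision_time = (threshold / drift_rate) * np.tanh(index)
        return error_rate, decision_time

    def path_integration_step(position, drift_rate, noise, time_step_size):
        # one call advances the decision variable by a single time step;
        # position accumulates across successive executions
        return (position + drift_rate * time_step_size
                + noise * np.sqrt(time_step_size) * np.random.normal())

    error_rate, decision_time = analytic_trial(0.3, 1.0, 0.5)   # one "trial"
    position = 0.0
    for _ in range(100):                                        # 100 "time steps"
        position = path_integration_step(position, 0.3, 0.5, 0.01)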
_DDM_Class_Reference: From 4181a62230a2b383eed716cab10e3ccc014c2a55 Mon Sep 17 00:00:00 2001 From: KristenManning Date: Thu, 7 Sep 2017 17:13:29 -0400 Subject: [PATCH 35/69] adding documentation of arguments of FHN function and adding FHN function to the Functions page --- PsyNeuLink/Components/Functions/Function.py | 147 ++++++++++++++++---- docs/source/Function.rst | 1 + 2 files changed, 122 insertions(+), 26 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index d6e812e4a1a..119ad1d0405 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -31,6 +31,7 @@ * `DriftDiffusionIntegrator` * `OrnsteinUhlenbeckIntegrator` * `AccumulatorIntegrator` + * `FHNIntegrator` * `BogaczEtAl` * `NavarroAndFuss` @@ -3050,11 +3051,13 @@ class IntegratorFunction(Function_Base): # • can noise and initializer be an array? If so, validated in validate_param? class Integrator(IntegratorFunction): # -------------------------------------------------------------------------------- - """Function that accumulates over many executions by storing its value from the most recent execution and using this - to compute its new value + """ + + Function that accumulates over many executions by storing its value from the most recent execution and using this + to compute its new value. All TransferFunctions must have the attribute `previous_value`, which specifies the value of the function on the - previous execution, and the attribute `initializer`, which sets `previous_value` on the first execution + previous execution, and the attribute `initializer`, which sets `previous_value` on the first execution. """ @@ -4467,33 +4470,69 @@ class FHNIntegrator( .. _FHNIntegrator: - Implements the Fitzhugh-Nagumo model using the 4th order Runge Kutta method of numerical integration. The model is - defined by a system of differential equations: dv/dt and dw/dt, which are parameterized as follows: - - time_constant_v * dv/dt = a_v * v^3 + b_v * v^2 + c_v*v^2 + d_v + e_v * w + f_v * I_ext - - time_constant_w * dw/dt = a_w * v + b_w * w + c_w + Implements the Fitzhugh-Nagumo model using the 4th order Runge Kutta method of numerical integration. Arguments --------- default_variable : number, list or np.array : default ClassDefaults.variable - specifies a template for the value to be integrated; if it is a list or array, each element is independently - integrated. + specifies a template for the external stimulus initial_w : float, list or 1d np.array : default 0.0 specifies starting value for integration of dw/dt. If it is a list or array, it must be the same length as - `default_variable ` (see `initializer - ` for details). + `default_variable ` initial_v : float, list or 1d np.array : default 0.0 specifies starting value for integration of dv/dt. If it is a list or array, it must be the same length as - `default_variable ` (see `initializer - ` for details). 
+ `default_variable ` + + time_step_size : float : default 0.1 + specifies the time step size of numerical integration t_0 : float : default 0.0 specifies starting value for time + a_v : float : default -1/3 + coefficient on the v^3 term of the dv/dt equation + + b_v : float : default 0.0 + coefficient on the v^2 term of the dv/dt equation + + c_v : float : default 1.0 + coefficient on the v term of the dv/dt equation + + d_v : float : default 0.0 + constant term in the dv/dt equation + + e_v : float : default -1.0 + coefficient on the w term in the dv/dt equation + + f_v : float : default 1.0 + coefficient on the external stimulus (`variable `) term in the dv/dt equation + + time_constant_v : float : default 1.0 + scaling factor on the dv/dt equation + + a_w : float : default 1.0, + coefficient on the v term of the dw/dt equation + + b_w : float : default -0.8, + coefficient on the w term of the dw/dt equation + + c_w : float : default 0.7, + constant term in the dw/dt equation + + electrotonic_coupling : float : default 1.0 + coefficient which simulates electrotonic coupling by scaling the values of dw/dt such that the v term + (representing the input from the LC) increases when the uncorrelated_activity term (representing baseline + activity) decreases + + uncorrelated_activity : float : default 0.0 + constant term in the dw/dt equation + + time_constant_w : float : default 12.5 + scaling factor on the dw/dt equation + params : Optional[Dict[param keyword, param value]] a `parameter dictionary ` that specifies the parameters for the function. Values specified for parameters in the dictionary override any assigned to those parameters in @@ -4507,6 +4546,7 @@ class FHNIntegrator( defined in __init__.py (see :doc:`PreferenceSet ` for details). + Attributes ---------- @@ -4526,6 +4566,61 @@ class FHNIntegrator( owner : Mechanism `component ` to which the Function has been assigned. + initial_w : float, list or 1d np.array : default 0.0 + specifies starting value for integration of dw/dt. If it is a list or array, it must be the same length as + `default_variable ` + + initial_v : float, list or 1d np.array : default 0.0 + specifies starting value for integration of dv/dt.
If it is a list or array, it must be the same length as + `default_variable ` + + time_step_size : float : default 0.1 + specifies the time step size of numerical integration + + t_0 : float : default 0.0 + specifies starting value for time + + a_v : float : default -1/3 + coefficient on the v^3 term of the dv/dt equation + + b_v : float : default 0.0 + coefficient on the v^2 term of the dv/dt equation + + c_v : float : default 1.0 + coefficient on the v term of the dv/dt equation + + d_v : float : default 0.0 + constant term in the dv/dt equation + + e_v : float : default -1.0 + coefficient on the w term in the dv/dt equation + + f_v : float : default 1.0 + coefficient on the external stimulus (`variable `) term in the dv/dt equation + + time_constant_v : float : default 1.0 + scaling factor on the dv/dt equation + + a_w : float : default 1.0, + coefficient on the v term of the dw/dt equation + + b_w : float : default -0.8, + coefficient on the w term of the dw/dt equation + + c_w : float : default 0.7, + constant term in the dw/dt equation + + electrotonic_coupling : float : default 1.0 + coefficient which simulates electrotonic coupling by scaling the values of dw/dt such that the v term + (representing the input from the LC) increases when the uncorrelated_activity term (representing baseline + activity) decreases + + uncorrelated_activity : float : default 0.0 + constant term in the dw/dt equation + + time_constant_w : float : default 12.5 + scaling factor on the dw/dt equation + prefs : PreferenceSet or specification dict : Projection.classPreferences the `PreferenceSet` for function. Specified in the **prefs** argument of the constructor for the function; if it is not specified, a default is assigned using `classPreferences` defined in __init__.py @@ -4546,9 +4641,9 @@ class ClassDefaults(Integrator.ClassDefaults): INCREMENT: None, }) - # multiplicative param does not make sense in this case - multiplicative_param = RATE - additive_param = INCREMENT + + multiplicative_param = SCALE + additive_param = OFFSET @tc.typecheck def __init__(self, @@ -4623,12 +4718,15 @@ def function(self, time_scale=TimeScale.TRIAL, context=None): """ - Return: previous_v , previous_w at each time step, which represents the numerical integration of the follwing - system of differential equations: + Return: current v, current w + + The model is defined by the following system of differential equations: + + time_constant_v * dv/dt = a_v * v^3 + b_v * v^2 + c_v*v + d_v + e_v * w + f_v * I_ext + + time_constant_w * dw/dt = electrotonic_coupling * a_w * v + b_w * w + c_w + (1 - self.electrotonic_coupling) * self.uncorrelated_activity - time_constant_v * dv/dt = a_v * v^3 + b_v * v^2 + c_v*v^2 + d_v + e_v * w + f_v * I_ext - time_constant_w * dw/dt = a_w * v + b_w * w + c_w Arguments --------- params : Dict[param keyword, param value] : default ClassDefaults.variable a `parameter dictionary ` that specifies the parameters for the function. Values specified for parameters in the dictionary override any assigned to those parameters in arguments of the constructor. - time_scale : TimeScale : default TimeScale.TRIAL - specifies whether the function is executed on the time_step or trial time scale.
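A minimal standalone sketch of the system of equations above (plain Python, using the default coefficient values documented in this patch; the free-standing dv_dt, dw_dt, and rk4_step helpers are illustrative only and are not the class's own implementation, which integrates each equation with its `_runge_kutta_4` helper while holding the other variable at its previous_v / previous_w value)::

    # generalized FitzHugh-Nagumo coefficients (defaults documented above);
    # with these values the equations reduce to the classic FitzHugh-Nagumo form
    a_v, b_v, c_v, d_v, e_v, f_v = -1.0 / 3, 0.0, 1.0, 0.0, -1.0, 1.0
    a_w, b_w, c_w = 1.0, -0.8, 0.7
    time_constant_v, time_constant_w = 1.0, 12.5
    electrotonic_coupling, uncorrelated_activity = 1.0, 0.0

    def dv_dt(v, w, stimulus):
        return (a_v * v**3 + b_v * v**2 + c_v * v + d_v
                + e_v * w + f_v * stimulus) / time_constant_v

    def dw_dt(v, w):
        return (electrotonic_coupling * a_w * v + b_w * w + c_w
                + (1 - electrotonic_coupling) * uncorrelated_activity) / time_constant_w

    def rk4_step(v, w, stimulus, dt=0.1):
        # classic 4th-order Runge-Kutta step for each equation, holding the
        # other variable at its value from the previous step
        k1 = dv_dt(v, w, stimulus)
        k2 = dv_dt(v + dt * k1 / 2, w, stimulus)
        k3 = dv_dt(v + dt * k2 / 2, w, stimulus)
        k4 = dv_dt(v + dt * k3, w, stimulus)
        l1 = dw_dt(v, w)
        l2 = dw_dt(v, w + dt * l1 / 2)
        l3 = dw_dt(v, w + dt * l2 / 2)
        l4 = dw_dt(v, w + dt * l3)
        return (v + dt * (k1 + 2 * k2 + 2 * k3 + k4) / 6,
                w + dt * (l1 + 2 * l2 + 2 * l3 + l4) / 6)

    v, w = 0.0, 0.0
    for _ in range(100):          # 100 executions of one time step each
        v, w = rk4_step(v, w, stimulus=1.0)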
- Returns ------- - previous_v , previous_w + current value of v , current value of w : float, list, or np.array """ diff --git a/docs/source/Function.rst b/docs/source/Function.rst index b2d3bd679c1..7629a09011f 100644 --- a/docs/source/Function.rst +++ b/docs/source/Function.rst @@ -23,6 +23,7 @@ Functions AdaptiveIntegrator, DriftDiffusionIntegrator, OrnsteinUhlenbeckIntegrator, + FHNIntegrator, AccumulatorIntegrator, BogaczEtAl, NavarroAndFuss, From b105326c5bf0aa40249009299247dc9414367854 Mon Sep 17 00:00:00 2001 From: Kristen Manning Date: Thu, 7 Sep 2017 17:49:02 -0400 Subject: [PATCH 36/69] renaming electrotonic_coupling to mode in FHN integrator function --- PsyNeuLink/Components/Functions/Function.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/PsyNeuLink/Components/Functions/Function.py b/PsyNeuLink/Components/Functions/Function.py index 119ad1d0405..309717472bf 100644 --- a/PsyNeuLink/Components/Functions/Function.py +++ b/PsyNeuLink/Components/Functions/Function.py @@ -4460,7 +4460,7 @@ class FHNIntegrator( a_w=1.0, \ b_w=-0.8, \ c_w=0.7, \ - electrotonic_coupling=1.0, \ + mode=1.0, \ uncorrelated_activity=0.0 \ time_constant_w = 12.5, \ params=None, \ @@ -4522,7 +4522,7 @@ class FHNIntegrator( c_w : float : default 0.7, constant term in the dw/dt equation - electrotonic_coupling : float : default 1.0 + mode : float : default 1.0 coefficient which simulates electrotonic coupling by scaling the values of dw/dt such that the v term (representing the input from the LC) increases when the uncorrelated_activity term (representing baseline activity) decreases @@ -4610,7 +4610,7 @@ class FHNIntegrator( c_w : float : default 0.7, constant term in the dw/dt equation - electrotonic_coupling : float : default 1.0 + mode : float : default 1.0 coefficient which simulates electrotonic coupling by scaling the values of dw/dt such that the v term (representing the input from the LC) increases when the uncorrelated_activity term (representing baseline activity) decreases @@ -4665,7 +4665,7 @@ def __init__(self, b_w=-0.8, c_w=0.7, time_constant_w = 12.5, - electrotonic_coupling = 1.0, + mode = 1.0, uncorrelated_activity = 0.0, params: tc.optional(dict) = None, owner=None, @@ -4690,7 +4690,7 @@ def __init__(self, a_w=a_w, b_w=b_w, c_w=c_w, - electrotonic_coupling=electrotonic_coupling, + mode=mode, uncorrelated_activity=uncorrelated_activity, time_constant_w=time_constant_w, params=params) @@ -4724,7 +4724,7 @@ def function(self, time_constant_v * dv/dt = a_v * v^3 + b_v * v^2 + c_v*v^2 + d_v + e_v * w + f_v * I_ext - time_constant_w * dw/dt = electrotonic_coupling * a_w * v + b_w * w + c_w + (1 - self.electrotonic_coupling) * self.uncorrelated_activity + time_constant_w * dw/dt = mode * a_w * v + b_w * w + c_w + (1 - self.mode) * self.uncorrelated_activity @@ -4753,8 +4753,8 @@ def dv_dt(time, v): return val def dw_dt(time, w): - return (self.electrotonic_coupling*self.a_w*self.previous_v + self.b_w*w + self.c_w + - (1-self.electrotonic_coupling)*self.uncorrelated_activity)/self.time_constant_w + return (self.mode*self.a_w*self.previous_v + self.b_w*w + self.c_w + + (1-self.mode)*self.uncorrelated_activity)/self.time_constant_w new_v = self._runge_kutta_4(previous_time=self.previous_t, previous_value=self.previous_v, From afe5e5bd1745da4a74a6a5491d04bac3fa89ef02 Mon Sep 17 00:00:00 2001 From: jdcpni Date: Tue, 12 Sep 2017 16:48:12 -0400 Subject: [PATCH 37/69] Refactor control mech merged devel (#453) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit * • index.rst - main title changed to "Welcome to PsyNeuLink" * • ControlMechanisms monitor_for_control argument and attribute changed to objective_mechanism * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # docs/source/index.rst * Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeuLink into devel # Conflicts: # docs/source/index.rst * Merge branches 'devel' and 'feat/Mechanism/LC' of https://github.com/PrincetonUniversity/PsyNeuLink into feat/Mechanism/LC # Conflicts: # PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py # PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/LCMechanism.py # Scripts/DEBUGGING SCRIPTS/LCMechanism Test Script.py * Merge branches 'devel' and 'feat/Mechanism/LC' of https://github.com/PrincetonUniversity/PsyNeuLink into feat/Mechanism/LC # Conflicts: # PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py # PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/LCMechanism.py # Scripts/DEBUGGING SCRIPTS/LCMechanism Test Script.py * Merge branches 'devel' and 'feat/Mechanism/LC' of https://github.com/PrincetonUniversity/PsyNeuLink into feat/Mechanism/LC # Conflicts: # PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py # PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/LCMechanism.py # Scripts/DEBUGGING SCRIPTS/LCMechanism Test Script.py * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * Merge branch 'refac/ControlMechanisms/monitor_for_control_to_objective_mechanism' of https://github.com/PrincetonUniversity/PsyNeuLink into refac/ControlMechanisms/monitor_for_control_to_objective_mechanism # Conflicts: # PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py * Merge branch 'refac/ControlMechanisms/monitor_for_control_to_objective_mechanism' of https://github.com/PrincetonUniversity/PsyNeuLink into refac/ControlMechanisms/monitor_for_control_to_objective_mechanism # Conflicts: # PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py * Merge branch 'refac/ControlMechanisms/monitor_for_control_to_objective_mechanism' of https://github.com/PrincetonUniversity/PsyNeuLink into refac/ControlMechanisms/monitor_for_control_to_objective_mechanism # Conflicts: # PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * • ControlMechanism, EVCMechanism, ControlSignal, ObjectiveMechanism and System: - all refactored to make simpler and more modular: * most instantiation code moved from EVCMechaism to ControlMechanism * moved handling of monitored_output_states to ObjectiveMechanism monitored_values * moved parsing of ControlSignal specification from ControlMechanism to ControlSignal * moved identification of monitored_output_states and control_signals for System to methods on System * moved assignment of controller from ControlMechanism to System * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * • System - removed assignment of default controller for System, can now be None * Merge branches 'devel' and 'refactor_control_mech_complete' of https://github.com/PrincetonUniversity/PsyNeuLink into refactor_control_mech_complete # Conflicts: # 
PsyNeuLink/Components/Functions/Function.py # PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/ControlMechanism.py # PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/ControlMechanism/DefaultControlMechanism.py # PsyNeuLink/Components/Mechanisms/AdaptiveMechanisms/LearningMechanism/LearningMechanism.py # PsyNeuLink/Components/Mechanisms/ProcessingMechanisms/ObjectiveMechanism.py # PsyNeuLink/Components/Projections/PathwayProjections/MappingProjection.py # PsyNeuLink/Components/States/ModulatorySignals/ControlSignal.py # PsyNeuLink/Components/System.py # PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/EVC/EVCMechanism.py # PsyNeuLink/Library/Mechanisms/AdaptiveMechanisms/ControlMechanisms/LCMechanism.py # PsyNeuLink/Library/Mechanisms/ProcessingMechanisms/ObjectiveMechanisms/ComparatorMechanism.py # PsyNeuLink/__init__.py # Scripts/TEST SCRIPTS/EVC System Laming Validation Test Script.py # tests/mechanisms/test_integrator_mechanism.py * - * - * - --- .idea/runConfigurations/Make_HTML.xml | 2 +- ...C_System_Laming_Validation_Test_Script.xml | 4 +- .../_Multilayer_Learning_Test_Script.xml | 4 +- .../_Stroop_Model_Learning_Test_Script.xml | 4 +- .../runConfigurations/_Stroop_Model_Test.xml | 6 +- PsyNeuLink/Components/Functions/Function.py | 123 ++- .../AdaptiveMechanisms/AdaptiveMechanism.py | 2 +- .../ControlMechanism/ControlMechanism.py | 882 ++++++++++-------- .../DefaultControlMechanism.py | 16 +- .../LearningMechanism/LearningMechanism.py | 44 +- PsyNeuLink/Components/Mechanisms/Mechanism.py | 16 +- .../ObjectiveMechanism.py | 585 +++++++++--- PsyNeuLink/Components/Process.py | 6 +- .../PathwayProjections/MappingProjection.py | 8 +- .../Components/Projections/Projection.py | 17 - PsyNeuLink/Components/States/InputState.py | 59 +- .../States/ModulatorySignals/ControlSignal.py | 209 ++++- PsyNeuLink/Components/States/OutputState.py | 5 +- PsyNeuLink/Components/States/State.py | 12 +- PsyNeuLink/Components/System.py | 674 ++++++++++--- PsyNeuLink/Globals/Keywords.py | 7 +- PsyNeuLink/Globals/Run.py | 14 +- .../ControlMechanisms/EVC/EVCAuxiliary.py | 82 +- .../ControlMechanisms/EVC/EVCMechanism.py | 849 ++++------------- .../ControlMechanisms/LCMechanism.py | 25 +- .../ComparatorMechanism.py | 2 +- PsyNeuLink/Scheduling/Scheduler.py | 5 +- PsyNeuLink/__init__.py | 3 +- Scripts/Scratch Pad.py | 100 +- ...VC System Laming Validation Test Script.py | 35 +- TODO List.py | 33 +- 31 files changed, 2205 insertions(+), 1628 deletions(-) diff --git a/.idea/runConfigurations/Make_HTML.xml b/.idea/runConfigurations/Make_HTML.xml index a456e8fa745..6ab01496677 100644 --- a/.idea/runConfigurations/Make_HTML.xml +++ b/.idea/runConfigurations/Make_HTML.xml @@ -3,7 +3,7 @@