diff --git a/psyneulink/core/components/component.py b/psyneulink/core/components/component.py index a1a990fbf0f..1b2aa3d7931 100644 --- a/psyneulink/core/components/component.py +++ b/psyneulink/core/components/component.py @@ -1652,6 +1652,7 @@ def _deferred_init(self, context=None): # Complete initialization # MODIFIED 10/27/18 OLD: super(self.__class__,self).__init__(**self._init_args) + # MODIFIED 10/27/18 NEW: FOLLOWING IS NEEDED TO HANDLE FUNCTION DEFERRED INIT (JDC) # try: # super(self.__class__,self).__init__(**self._init_args) @@ -1786,7 +1787,7 @@ def generate_error(param_name): for param_name in runtime_params: if not isinstance(param_name, str): generate_error(param_name) - elif hasattr(self, param_name): + elif param_name in self.parameters: if param_name in {FUNCTION, INPUT_PORTS, OUTPUT_PORTS}: generate_error(param_name) if context.execution_id not in self._runtime_params_reset: @@ -1797,7 +1798,7 @@ def generate_error(param_name): # Any remaining params should either belong to the Component's function # or, if the Component is a Function, to it or its owner elif ( # If Component is not a function, and its function doesn't have the parameter or - (not is_function_type(self) and not hasattr(self.function, param_name)) + (not is_function_type(self) and param_name not in self.function.parameters) # the Component is a standalone function: or (is_function_type(self) and not self.owner)): generate_error(param_name) @@ -2856,8 +2857,8 @@ def _instantiate_function(self, function, function_params=None, context=None): # KAM added 6/14/18 for functions that do not pass their has_initializers status up to their owner via property # FIX: need comprehensive solution for has_initializers; need to determine whether ports affect mechanism's # has_initializers status - if self.function.has_initializers: - self.has_initializers = True + if self.function.parameters.has_initializers._get(context): + self.parameters.has_initializers._set(True, context) self._parse_param_port_sources() diff --git a/psyneulink/core/components/functions/objectivefunctions.py b/psyneulink/core/components/functions/objectivefunctions.py index d32a0febeee..52afed17035 100644 --- a/psyneulink/core/components/functions/objectivefunctions.py +++ b/psyneulink/core/components/functions/objectivefunctions.py @@ -66,7 +66,7 @@ class Parameters(Function_Base.Parameters): :default value: False :type: ``bool`` """ - normalize = False + normalize = Parameter(False, stateful=False) metric = Parameter(None, stateful=False) @@ -205,7 +205,7 @@ class Parameters(ObjectiveFunction.Parameters): metric = Parameter(ENERGY, stateful=False) metric_fct = Parameter(None, stateful=False, loggable=False) transfer_fct = Parameter(None, stateful=False, loggable=False) - normalize = False + normalize = Parameter(False, stateful=False) @tc.typecheck def __init__(self, diff --git a/psyneulink/core/components/functions/optimizationfunctions.py b/psyneulink/core/components/functions/optimizationfunctions.py index 836e3f89c0b..91d88b11fa9 100644 --- a/psyneulink/core/components/functions/optimizationfunctions.py +++ b/psyneulink/core/components/functions/optimizationfunctions.py @@ -405,11 +405,11 @@ def _validate_params(self, request_set, target_set=None, context=None): if SEARCH_SPACE in request_set and request_set[SEARCH_SPACE] is not None: search_space = request_set[SEARCH_SPACE] - if not all(isinstance(s, (SampleIterator, type(None))) for s in search_space): + if not all(isinstance(s, (SampleIterator, type(None), list, tuple, np.ndarray)) for s 
in search_space): raise OptimizationFunctionError("All entries in list specified for {} arg of {} must be a {}". format(repr(SEARCH_SPACE), self.__class__.__name__, - SampleIterator.__name__)) + "SampleIterator, list, tuple, or ndarray")) if SEARCH_TERMINATION_FUNCTION in request_set and request_set[SEARCH_TERMINATION_FUNCTION] is not None: if not is_function_type(request_set[SEARCH_TERMINATION_FUNCTION]): @@ -495,7 +495,11 @@ def _function(self, self._unspecified_args = [] current_sample = self._check_args(variable=variable, context=context, params=params) - current_value = self.owner.objective_mechanism.parameters.value._get(context) if self.owner else 0. + + try: + current_value = self.owner.objective_mechanism.parameters.value._get(context) + except AttributeError: + current_value = 0 samples = [] values = [] @@ -766,6 +770,7 @@ class GradientOptimization(OptimizationFunction): """ componentName = GRADIENT_OPTIMIZATION_FUNCTION + bounds = None class Parameters(OptimizationFunction.Parameters): """ @@ -924,7 +929,7 @@ def reset(self, *args, context=None): super().reset(*args) # Differentiate objective_function using autograd.grad() - if OBJECTIVE_FUNCTION in args[0]: + if OBJECTIVE_FUNCTION in args[0] and not self.gradient_function: try: from autograd import grad self.gradient_function = grad(self.objective_function) diff --git a/psyneulink/core/components/functions/selectionfunctions.py b/psyneulink/core/components/functions/selectionfunctions.py index 700d7c9e395..2d1fea9950b 100644 --- a/psyneulink/core/components/functions/selectionfunctions.py +++ b/psyneulink/core/components/functions/selectionfunctions.py @@ -162,8 +162,6 @@ class OneHot(SelectionFunction): componentName = ONE_HOT_FUNCTION - bounds = None - classPreferences = { PREFERENCE_SET_NAME: 'OneHotClassPreferences', REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), diff --git a/psyneulink/core/components/functions/statefulfunctions/integratorfunctions.py b/psyneulink/core/components/functions/statefulfunctions/integratorfunctions.py index 6ba0764cd8a..531dd987a6c 100644 --- a/psyneulink/core/components/functions/statefulfunctions/integratorfunctions.py +++ b/psyneulink/core/components/functions/statefulfunctions/integratorfunctions.py @@ -245,8 +245,6 @@ def __init__(self, **kwargs ) - self.has_initializers = True - # FIX CONSIDER MOVING THIS TO THE LEVEL OF Function_Base OR EVEN Component def _validate_params(self, request_set, target_set=None, context=None): """Check inner dimension (length) of all parameters used for the function @@ -574,8 +572,6 @@ def __init__(self, prefs=prefs, ) - self.has_initializers = True - def _accumulator_check_args(self, variable=None, context=None, params=None, target_set=None): """validate params and assign any runtime params. 
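Throughout this patch (the component.py hunk above and the _accumulator_check_args hunk below), runtime-parameter checks switch from `hasattr(self, param_name)` to `param_name in self.parameters`, backed by the `__contains__` added to the Parameters base class in the parameters.py hunk later in this diff. A minimal, self-contained sketch of those membership semantics; the stand-in classes are hypothetical simplifications, not PsyNeuLink's actual Parameters machinery:

    import itertools

    class Parameter:
        def __init__(self, default=None):
            self.default = default

    class ParametersBase:
        # Stand-in for the Parameters collection: a name -> Parameter mapping
        def values(self, show_all=True):
            return {k: v for k, v in type(self).__dict__.items()
                    if isinstance(v, Parameter)}

        def __contains__(self, item):
            # Mirrors the PR: chain.from_iterable over dict.items() flattens
            # each (name, Parameter) pair, so membership matches either a
            # parameter's name or the Parameter object itself.
            return item in itertools.chain.from_iterable(
                self.values(show_all=True).items())

    class MyParameters(ParametersBase):
        rate = Parameter(1.0)
        noise = Parameter(0.0)

    params = MyParameters()
    assert 'rate' in params            # matches by name
    assert 'some_attr' not in params   # unlike hasattr, non-Parameter attributes are ignored

Because the (name, Parameter) pairs are flattened by `chain.from_iterable`, membership matches either a parameter's name or the Parameter object itself; and unlike `hasattr`, it cannot accidentally match unrelated instance attributes.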
@@ -611,7 +607,7 @@ def _accumulator_check_args(self, variable=None, context=None, params=None, targ runtime_params = params if runtime_params: for param_name in runtime_params: - if hasattr(self, param_name): + if param_name in self.parameters: if param_name in {FUNCTION, INPUT_PORTS, OUTPUT_PORTS}: continue if context.execution_id not in self._runtime_params_reset: @@ -825,8 +821,6 @@ def __init__(self, prefs=prefs, ) - self.has_initializers = True - def _function(self, variable=None, context=None, @@ -1060,8 +1054,6 @@ def __init__(self, prefs=prefs, ) - self.has_initializers = True - def _validate_params(self, request_set, target_set=None, context=None): super()._validate_params( request_set=request_set, @@ -1594,8 +1586,6 @@ def __init__(self, prefs=prefs, ) - self.has_initializers = True - def _validate_params(self, request_set, target_set=None, context=None): # Handle list or array for rate specification @@ -2047,8 +2037,6 @@ def __init__(self, prefs=prefs, ) - self.has_initializers = True - def _validate_params(self, request_set, target_set=None, context=None): super()._validate_params(request_set=request_set, target_set=target_set,context=context) @@ -2440,8 +2428,6 @@ def __init__(self, prefs=prefs, ) - self.has_initializers = True - def _validate_noise(self, noise): if noise is not None and not isinstance(noise, float) and not(isinstance(noise, np.ndarray) and np.issubdtype(noise.dtype, np.floating)): raise FunctionError( @@ -2858,8 +2844,6 @@ def __init__(self, prefs=prefs, ) - self.has_initializers = True - def _validate_noise(self, noise): if noise is not None and not isinstance(noise, float): raise FunctionError( @@ -3140,8 +3124,6 @@ def __init__(self, prefs=prefs ) - self.has_initializers = True - def _function(self, variable=None, context=None, diff --git a/psyneulink/core/components/functions/statefulfunctions/memoryfunctions.py b/psyneulink/core/components/functions/statefulfunctions/memoryfunctions.py index b375772cae9..7563cfa5034 100644 --- a/psyneulink/core/components/functions/statefulfunctions/memoryfunctions.py +++ b/psyneulink/core/components/functions/statefulfunctions/memoryfunctions.py @@ -232,8 +232,6 @@ def __init__(self, prefs=prefs, ) - self.has_initializers = True - def _initialize_previous_value(self, initializer, context=None): initializer = initializer or [] previous_value = deque(initializer, maxlen=self.parameters.history.get(context)) @@ -731,8 +729,6 @@ def __init__(self, self.parameters.key_size._set(len(self.previous_value[KEYS][0]), Context()) self.parameters.val_size._set(len(self.previous_value[VALS][0]), Context()) - self.has_initializers = True - def _get_state_ids(self): return super()._get_state_ids() + ["ring_memory"] @@ -1000,8 +996,6 @@ def _instantiate_attributes_before_function(self, function=None, context=None): context ) - self.has_initializers = True - if isinstance(self.distance_function, type): self.distance_function = self.distance_function(context=context) diff --git a/psyneulink/core/components/functions/statefulfunctions/statefulfunction.py b/psyneulink/core/components/functions/statefulfunctions/statefulfunction.py index cbd4f95d3c1..5936cd30720 100644 --- a/psyneulink/core/components/functions/statefulfunctions/statefulfunction.py +++ b/psyneulink/core/components/functions/statefulfunctions/statefulfunction.py @@ -24,7 +24,7 @@ import numpy as np from psyneulink.core import llvm as pnlvm -from psyneulink.core.components.component import DefaultsFlexibility +from psyneulink.core.components.component import 
DefaultsFlexibility, _has_initializers_setter from psyneulink.core.components.functions.function import Function_Base, FunctionError from psyneulink.core.components.functions.distributionfunctions import DistributionFunction from psyneulink.core.globals.keywords import INITIALIZER, STATEFUL_FUNCTION_TYPE, STATEFUL_FUNCTION, NOISE, RATE @@ -194,7 +194,7 @@ class Parameters(Function_Base.Parameters): rate = Parameter(1.0, modulable=True) previous_value = Parameter(np.array([0]), pnl_internal=True) initializer = Parameter(np.array([0]), pnl_internal=True) - + has_initializers = Parameter(True, setter=_has_initializers_setter, pnl_internal=True) @handle_external_context() @tc.typecheck @@ -238,8 +238,6 @@ def __init__(self, **kwargs ) - self.has_initializers = True - def _validate(self, context=None): self._validate_rate(self.defaults.rate) self._validate_initializers(self.defaults.variable, context=context) @@ -444,15 +442,20 @@ def _instantiate_attributes_before_function(self, function=None, context=None): # use np.broadcast_to to guarantee that all initializer type attributes take on the same shape as variable if not np.isscalar(self.defaults.variable): for attr in self.initializers: - setattr(self, attr, np.broadcast_to(getattr(self, attr), self.defaults.variable.shape).copy()) + param = getattr(self.parameters, attr) + param._set( + np.broadcast_to( + param._get(context), + self.defaults.variable.shape + ).copy(), + context + ) # create all stateful attributes and initialize their values to the current values of their # corresponding initializer attributes for i, attr_name in enumerate(self.stateful_attributes): - initializer_value = getattr(self, self.initializers[i]).copy() - setattr(self, attr_name, initializer_value) - - self.has_initializers = True + initializer_value = getattr(self.parameters, self.initializers[i])._get(context).copy() + getattr(self.parameters, attr_name)._set(initializer_value, context) super()._instantiate_attributes_before_function(function=function, context=context) @@ -555,7 +558,7 @@ def reset(self, *args, context=None): setattr(self, attr, reinitialization_values[i]) getattr(self.parameters, attr).set(reinitialization_values[i], context, override=True) - value.append(getattr(self, self.stateful_attributes[i])) + value.append(getattr(self.parameters, self.stateful_attributes[i])._get(context)) self.parameters.value.set(value, context, override=True) return value diff --git a/psyneulink/core/components/functions/transferfunctions.py b/psyneulink/core/components/functions/transferfunctions.py index 72de681925d..77eb2a49748 100644 --- a/psyneulink/core/components/functions/transferfunctions.py +++ b/psyneulink/core/components/functions/transferfunctions.py @@ -190,9 +190,6 @@ class Identity(TransferFunction): # ------------------------------------------- componentName = IDENTITY_FUNCTION - bounds = None - - classPreferences = { PREFERENCE_SET_NAME: 'IdentityClassPreferences', REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), @@ -338,8 +335,6 @@ class Linear(TransferFunction): # --------------------------------------------- componentName = LINEAR_FUNCTION - bounds = None - classPreferences = { PREFERENCE_SET_NAME: 'LinearClassPreferences', REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), @@ -589,8 +584,6 @@ class Exponential(TransferFunction): # ---------------------------------------- componentName = EXPONENTIAL_FUNCTION - bounds = (0, None) - class Parameters(TransferFunction.Parameters): """ Attributes @@ -624,6 +617,7 
@@ class Parameters(TransferFunction.Parameters): bias = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM]) scale = Parameter(1.0, modulable=True) offset = Parameter(0.0, modulable=True) + bounds = (0, None) @tc.typecheck def __init__(self, @@ -856,10 +850,6 @@ class Logistic(TransferFunction): # ------------------------------------------- componentName = LOGISTIC_FUNCTION parameter_keywords.update({GAIN, BIAS, OFFSET}) - - bounds = (0, 1) - - _model_spec_class_name_is_generic = True class Parameters(TransferFunction.Parameters): @@ -902,6 +892,7 @@ class Parameters(TransferFunction.Parameters): bias = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM]) offset = Parameter(0.0, modulable=True) scale = Parameter(1.0, modulable=True) + bounds = (0, 1) @tc.typecheck def __init__(self, @@ -1165,8 +1156,6 @@ class Tanh(TransferFunction): # ----------------------------------------------- componentName = TANH_FUNCTION parameter_keywords.update({GAIN, BIAS, OFFSET}) - bounds = (0, 1) - class Parameters(TransferFunction.Parameters): """ Attributes @@ -1207,6 +1196,7 @@ class Parameters(TransferFunction.Parameters): bias = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM]) offset = Parameter(0.0, modulable=True) scale = Parameter(1.0, modulable=True) + bounds = (0, 1) @tc.typecheck def __init__(self, @@ -1443,8 +1433,6 @@ class ReLU(TransferFunction): # ----------------------------------------------- componentName = RELU_FUNCTION parameter_keywords.update({GAIN, BIAS, LEAK}) - bounds = (None,None) - class Parameters(TransferFunction.Parameters): """ Attributes @@ -1471,6 +1459,7 @@ class Parameters(TransferFunction.Parameters): gain = Parameter(1.0, modulable=True, aliases=[MULTIPLICATIVE_PARAM]) bias = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM]) leak = Parameter(0.0, modulable=True) + bounds = (None, None) @tc.typecheck def __init__(self, @@ -1683,8 +1672,6 @@ class Gaussian(TransferFunction): # ------------------------------------------- componentName = GAUSSIAN_FUNCTION # parameter_keywords.update({STANDARD_DEVIATION, BIAS, SCALE, OFFSET}) - bounds = (None,None) - class Parameters(TransferFunction.Parameters): """ Attributes @@ -1718,6 +1705,7 @@ class Parameters(TransferFunction.Parameters): bias = Parameter(0.0, modulable=True, aliases=[ADDITIVE_PARAM]) scale = Parameter(1.0, modulable=True) offset = Parameter(0.0, modulable=True) + bounds = (None, None) @tc.typecheck def __init__(self, @@ -1949,8 +1937,6 @@ class GaussianDistort(TransferFunction): #------------------------------------- componentName = GAUSSIAN_DISTORT_FUNCTION # parameter_keywords.update({VARIANCE, BIAS, SCALE, OFFSET}) - bounds = (None,None) - class Parameters(TransferFunction.Parameters): """ Attributes @@ -1991,6 +1977,7 @@ class Parameters(TransferFunction.Parameters): scale = Parameter(1.0, modulable=True) offset = Parameter(0.0, modulable=True) random_state = Parameter(None, stateful=True, loggable=False) + bounds = (None, None) @tc.typecheck def __init__(self, @@ -2227,8 +2214,6 @@ class SoftMax(TransferFunction): componentName = SOFTMAX_FUNCTION - bounds = (0, 1) - class Parameters(TransferFunction.Parameters): """ Attributes @@ -2653,8 +2638,6 @@ class LinearMatrix(TransferFunction): # --------------------------------------- componentName = LINEAR_MATRIX_FUNCTION - bounds = None - DEFAULT_FILLER_VALUE = 0 class Parameters(TransferFunction.Parameters): @@ -2699,7 +2682,10 @@ def __init__(self, prefs=prefs, ) - self.matrix = self.instantiate_matrix(self.matrix) + self.parameters.matrix.set( 
+ self.instantiate_matrix(self.parameters.matrix.get()), + skip_log=True, + ) # def _validate_variable(self, variable, context=None): # """Insure that variable passed to LinearMatrix is a max 2D array @@ -2938,10 +2924,12 @@ def _instantiate_attributes_before_function(self, function=None, context=None): if isinstance(self.owner, Projection): self.receiver = self.defaults.variable - if self.matrix is None and not hasattr(self.owner, "receiver"): + matrix = self.parameters.matrix._get(context) + + if matrix is None and not hasattr(self.owner, "receiver"): variable_length = np.size(np.atleast_2d(self.defaults.variable), 1) - self.matrix = np.identity(variable_length) - self.matrix = self.instantiate_matrix(self.matrix) + matrix = np.identity(variable_length) + self.parameters.matrix._set(self.instantiate_matrix(matrix), context) def instantiate_matrix(self, specification, context=None): """Implements matrix indicated by specification @@ -3435,8 +3423,6 @@ class TransferWithCosts(TransferFunction): componentName = TRANSFER_WITH_COSTS_FUNCTION - bounds = None - classPreferences = { PREFERENCE_SET_NAME: 'TransferWithCostssClassPreferences', REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE), @@ -3842,11 +3828,6 @@ def _function(self, # Compute intensity_cost if enabled_cost_functions & CostFunctions.INTENSITY: - # Assign modulatory param values to intensity_cost_function - self.intensity_cost_fct_mult_param = \ - self._get_current_parameter_value(INTENSITY_COST_FCT_MULTIPLICATIVE_PARAM, context) - self.intensity_cost_fct_add_param = \ - self._get_current_parameter_value(INTENSITY_COST_FCT_ADDITIVE_PARAM, context) # Execute intensity_cost function intensity_cost = self.intensity_cost_fct(intensity, context=context) self.parameters.intensity_cost._set(intensity_cost, context) @@ -3859,11 +3840,6 @@ def _function(self, intensity_change = np.abs(intensity - self.parameters.intensity._get(context)) except TypeError: intensity_change = np.zeros_like(self.parameters_intensity._get(context)) - # Assign modulatory param values to adjustment_cost_function - self.adjustment_cost_fct_mult_param = \ - self._get_current_parameter_value(ADJUSTMENT_COST_FCT_MULTIPLICATIVE_PARAM, context) - self.adjustment_cost_fct_add_param = \ - self._get_current_parameter_value(ADJUSTMENT_COST_FCT_ADDITIVE_PARAM, context) # Execute adjustment_cost function adjustment_cost = self.adjustment_cost_fct(intensity_change, context=context) self.parameters.adjustment_cost._set(adjustment_cost, context) @@ -3871,23 +3847,12 @@ def _function(self, # Compute duration_cost if enabled_cost_functions & CostFunctions.DURATION: - # Assign modulatory param values to duration_cost_function - self.duration_cost_fct_mult_param = \ - self._get_current_parameter_value(DURATION_COST_FCT_MULTIPLICATIVE_PARAM, context) - self.duration_cost_fct_add_param = \ - self._get_current_parameter_value(DURATION_COST_FCT_ADDITIVE_PARAM, context) # Execute duration_cost function duration_cost = self.duration_cost_fct(intensity, context=context) self.parameters.duration_cost._set(duration_cost, context) enabled_costs.append(duration_cost) # Alwasy execute combined_costs_fct if *any* costs are enabled - - # Assign modulatory param values to combine_costs_function - self.combine_costs_fct_mult_param = \ - self._get_current_parameter_value(COMBINE_COSTS_FCT_MULTIPLICATIVE_PARAM, context) - self.combine_costs_fct_add_param = \ - self._get_current_parameter_value(COMBINE_COSTS_FCT_ADDITIVE_PARAM, context) # Execute combine_costs function 
combined_costs = self.combine_costs_fct(enabled_costs, context=context) diff --git a/psyneulink/core/components/mechanisms/mechanism.py b/psyneulink/core/components/mechanisms/mechanism.py index 76585c20d1f..9621d194de4 100644 --- a/psyneulink/core/components/mechanisms/mechanism.py +++ b/psyneulink/core/components/mechanisms/mechanism.py @@ -1066,6 +1066,7 @@ class `UserList `, and the second the current iteration of the `optimization process `); it must return `True` or `False`. - search_space : list or ndarray + search_space : iterable of (list, tuple, ndarray, SampleSpec, or SampleIterator), or a single list, tuple, ndarray, SampleSpec, or SampleIterator specifies the `search_space ` parameter for `function `, unless that is specified in a constructor for `function - `. Each item must have the same shape as `control_allocation - `. + `. An element at index i should correspond to an element at index i in + `control_allocation `. If + `control_allocation ` contains only one element, then search_space can be + specified as a single element without an enclosing iterable. function : OptimizationFunction, function or method specifies the function used to optimize the `control_allocation `; @@ -680,9 +690,10 @@ class Parameters(ControlMechanism.Parameters): :default value: None :type: """ - function = Parameter(None, stateful=False, loggable=False) + function = Parameter(GridSearch, stateful=False, loggable=False) feature_function = Parameter(None, reference=True, stateful=False, loggable=False) search_function = Parameter(None, stateful=False, loggable=False) + search_space = Parameter(None, read_only=True) search_termination_function = Parameter(None, stateful=False, loggable=False) comp_execution_mode = Parameter('Python', stateful=False, loggable=False, pnl_internal=True) search_statefulness = Parameter(True, stateful=False, loggable=False) @@ -703,7 +714,7 @@ class Parameters(ControlMechanism.Parameters): ) num_estimates = None # search_space = None - control_allocation_search_space = None + control_allocation_search_space = Parameter(None, read_only=True, getter=_control_allocation_search_space_getter) saved_samples = None saved_values = None @@ -723,6 +734,8 @@ def __init__(self, **kwargs): """Implement OptimizationControlMechanism""" + function = function or GridSearch + # If agent_rep hasn't been specified, put into deferred init if agent_rep is None: if context.source==ContextFlags.COMMAND_LINE: @@ -842,13 +855,35 @@ def _instantiate_attributes_after_function(self, context=None): """Instantiate OptimizationControlMechanism's OptimizatonFunction attributes""" super()._instantiate_attributes_after_function(context=context) + + search_space = self.parameters.search_space._get(context) + if type(search_space) == np.ndarray: + search_space = search_space.tolist() + if search_space: + corrected_search_space = [] + try: + if type(search_space) == SampleIterator: + corrected_search_space.append(search_space) + elif type(search_space) == SampleSpec: + corrected_search_space.append(SampleIterator(search_space)) + else: + for i in self.parameters.search_space._get(context): + if not type(i) == SampleIterator: + corrected_search_space.append(SampleIterator(specification=i)) + continue + corrected_search_space.append(i) + except AssertionError: + corrected_search_space = [SampleIterator(specification=search_space)] + self.parameters.search_space._set(corrected_search_space, context) + # Assign parameters to function (OptimizationFunction) that rely on OptimizationControlMechanism -
self.function.reset({DEFAULT_VARIABLE: self.control_allocation, - OBJECTIVE_FUNCTION: self.evaluation_function, - # SEARCH_FUNCTION: self.search_function, - # SEARCH_TERMINATION_FUNCTION: self.search_termination_function, - SEARCH_SPACE: self.control_allocation_search_space - }) + self.function.reset({ + DEFAULT_VARIABLE: self.parameters.control_allocation._get(context), + OBJECTIVE_FUNCTION: self.evaluation_function, + # SEARCH_FUNCTION: self.search_function, + # SEARCH_TERMINATION_FUNCTION: self.search_termination_function, + SEARCH_SPACE: self.parameters.control_allocation_search_space._get(context) + }) if isinstance(self.agent_rep, type): self.agent_rep = self.agent_rep() @@ -1027,7 +1062,7 @@ def _get_evaluate_output_struct_type(self, ctx): def _get_evaluate_alloc_struct_type(self, ctx): return pnlvm.ir.ArrayType(ctx.float_ty, - len(self.control_allocation_search_space)) + len(self.parameters.control_allocation_search_space.get())) def _gen_llvm_net_outcome_function(self, *, ctx, tags=frozenset()): assert "net_outcome" in tags @@ -1306,11 +1341,6 @@ def _parse_feature_specs(self, input_ports, feature_function, context=None): return parsed_features - @property - def control_allocation_search_space(self): - """Return list of SampleIterators for allocation_samples of control_signals""" - return [c.allocation_samples for c in self.control_signals] - @property def _model_spec_parameter_blacklist(self): # default_variable is hidden in constructor arguments, diff --git a/psyneulink/core/components/mechanisms/modulatory/learning/learningmechanism.py b/psyneulink/core/components/mechanisms/modulatory/learning/learningmechanism.py index 9e31b83e0ed..d5960ea5109 100644 --- a/psyneulink/core/components/mechanisms/modulatory/learning/learningmechanism.py +++ b/psyneulink/core/components/mechanisms/modulatory/learning/learningmechanism.py @@ -547,7 +547,7 @@ ADDITIVE, AFTER, ASSERT, CONTEXT, CONTROL_PROJECTIONS, ENABLED, INPUT_PORTS, \ LEARNED_PARAM, LEARNING, LEARNING_MECHANISM, LEARNING_PROJECTION, LEARNING_SIGNAL, LEARNING_SIGNALS, \ MATRIX, NAME, ONLINE, OUTPUT_PORT, OUTPUT_PORTS, OWNER_VALUE, PARAMS, PROJECTIONS, SAMPLE, PORT_TYPE, VARIABLE -from psyneulink.core.globals.parameters import Parameter +from psyneulink.core.globals.parameters import FunctionParameter, Parameter from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel from psyneulink.core.globals.utilities import ContentAddressableList, convert_to_np_array, is_numeric, parameter_spec, convert_to_list @@ -662,22 +662,6 @@ def _error_signal_getter(owning_component=None, context=None): except (TypeError, IndexError): return None -def _learning_mechanism_learning_rate_setter(value, owning_component=None, context=None): - try: - # this prevents overridding a specified value on the function with - # this mechanism's default during initialization - # these checks could be done universally if we make a special handler - # for these parameters that serve only as mirrors into function - # parameters of the same name - if ( - owning_component.initialization_status is not ContextFlags.INITIALIZING - or owning_component.parameters.learning_rate._user_specified - or not owning_component.function.parameters.learning_rate._user_specified - ): - owning_component.function.parameters.learning_rate._set(value, context) - except AttributeError: - pass - return value class LearningMechanism(ModulatoryMechanism_Base): """ @@ -979,7 +963,7 @@ class 
Parameters(ModulatoryMechanism_Base.Parameters): error_matrix = Parameter(None, modulable=True) learning_signal = Parameter(None, read_only=True, getter=_learning_signal_getter) error_signal = Parameter(None, read_only=True, getter=_error_signal_getter) - learning_rate = Parameter(None, modulable=True, setter=_learning_mechanism_learning_rate_setter) + learning_rate = FunctionParameter(None) learning_enabled = True modulation = ADDITIVE input_ports = Parameter( @@ -1248,7 +1232,7 @@ def _instantiate_output_ports(self, context=None): variable=(OWNER_VALUE,0), params=params, reference_value=self.parameters.learning_signal._get(context), - modulation=self.modulation, + modulation=self.defaults.modulation, # port_spec=self.learning_signal) port_spec=learning_signal, context=context) diff --git a/psyneulink/core/components/mechanisms/processing/objectivemechanism.py b/psyneulink/core/components/mechanisms/processing/objectivemechanism.py index 516953cd0ea..282d6da61bd 100644 --- a/psyneulink/core/components/mechanisms/processing/objectivemechanism.py +++ b/psyneulink/core/components/mechanisms/processing/objectivemechanism.py @@ -761,15 +761,21 @@ def _instantiate_function_weights_and_exponents(self, context=None): DEFAULT_WEIGHT = 1 DEFAULT_EXPONENT = 1 - weights = [input_port.weight for input_port in self.input_ports] - exponents = [input_port.exponent for input_port in self.input_ports] + weights = [input_port.defaults.weight for input_port in self.input_ports] + exponents = [input_port.defaults.exponent for input_port in self.input_ports] - if hasattr(self.function, WEIGHTS): + if WEIGHTS in self.function.parameters: if any(weight is not None for weight in weights): - self.function.weights = [[weight or DEFAULT_WEIGHT] for weight in weights] - if hasattr(self.function, EXPONENTS): + self.function.parameters.weights._set( + [[weight or DEFAULT_WEIGHT] for weight in weights], + context + ) + if EXPONENTS in self.function.parameters: if any(exponent is not None for exponent in exponents): - self.function.exponents = [[exponent or DEFAULT_EXPONENT] for exponent in exponents] + self.function.parameters.exponents._set( + [[exponent or DEFAULT_EXPONENT] for exponent in exponents], + context + ) assert True # # MODIFIED 6/8/19 NEW: [JDC] @@ -800,14 +806,14 @@ def monitor(self): @property def monitor_weights_and_exponents(self): - if hasattr(self.function, WEIGHTS) and self.function.weights is not None: - weights = self.function.weights + if hasattr(self.function, WEIGHTS) and self.function.weights.base is not None: + weights = self.function.weights.base else: - weights = [input_port.weight for input_port in self.input_ports] - if hasattr(self.function, EXPONENTS) and self.function.exponents is not None: - exponents = self.function.exponents + weights = [input_port.weight.base for input_port in self.input_ports] + if hasattr(self.function, EXPONENTS) and self.function.exponents.base is not None: + exponents = self.function.exponents.base else: - exponents = [input_port.exponent for input_port in self.input_ports] + exponents = [input_port.exponent.base for input_port in self.input_ports] return [(w,e) for w, e in zip(weights,exponents)] @monitor_weights_and_exponents.setter diff --git a/psyneulink/core/components/mechanisms/processing/transfermechanism.py b/psyneulink/core/components/mechanisms/processing/transfermechanism.py index f2a0a736f4a..db186c2fc27 100644 --- a/psyneulink/core/components/mechanisms/processing/transfermechanism.py +++ 
b/psyneulink/core/components/mechanisms/processing/transfermechanism.py @@ -1359,7 +1359,7 @@ def _instantiate_parameter_ports(self, function=None, context=None): def _instantiate_attributes_before_function(self, function=None, context=None): super()._instantiate_attributes_before_function(function=function, context=context) - if self.initial_value is None: + if self.parameters.initial_value._get(context) is None: self.defaults.initial_value = copy.deepcopy(self.defaults.variable) self.parameters.initial_value._set(copy.deepcopy(self.defaults.variable), context) @@ -1393,88 +1393,86 @@ def _instantiate_integrator_function(self, variable, context): if self.integrator_function.owner is None: self.integrator_function.owner = self - if hasattr(self.integrator_function, NOISE): - fct_noise = np.array(self.integrator_function.noise) - mech_specified = not np.array_equal(mech_noise, np.array(self.class_defaults.noise)) - fct_specified = not np.array_equal(np.array(self.integrator_function.noise), - np.array(self.integrator_function.class_defaults.noise)) - - # Mechanism and function noise are not the same - if not np.array_equal(mech_noise, fct_noise): - # If function's noise was not specified, assign Mechanism's value to it - if not fct_specified: - self.integrator_function.parameters.noise._set(mech_noise, context) - # Otherwise, given precedence to function's value - else: - if mech_specified: - warnings.warn("Specification of the {} argument for {} ({}) conflicts with specification of" - " the {} parameter ({}) for its {} ({}); the Function's value will be used.". - format(repr(NOISE), self.name, mech_noise, - repr(NOISE), self.integrator_function.noise, - repr(INTEGRATOR_FUNCTION), - self.integrator_function.__class__.__name__)) - # Assign funciton's noise to Mechanism - self.parameters.noise._set(self.integrator_function.noise, context) - - # KDM 12/21/18: validating here until a standard scheme is designed, because it's tested for - self._validate_params( - request_set={'noise': self.integrator_function.noise}, - target_set={'noise': self.integrator_function.noise}, - context=context - ) - - if hasattr(self.integrator_function, INITIALIZER): - fct_intlzr = np.array(self.integrator_function.initializer) - # Check against variable, as class.default is None, but initial_value assigned to variable before here - mech_specified = not np.array_equal(mech_init_val, np.array(self.defaults.variable)) - fct_specified = not np.array_equal(np.array(self.integrator_function.initializer), - np.array(self.integrator_function.class_defaults.initializer)) - - # Mechanism initial_value and function initializer are not the same - if not np.array_equal(mech_init_val, fct_intlzr): - # If function's initializer was not specified, assign Mechanism's initial_value to it - if not fct_specified: - self.integrator_function.parameters.initializer._set(initializer, context) - self.integrator_function._initialize_previous_value(initializer, context) - # Otherwise, give precedence to function's value - else: - if mech_specified: - warnings.warn("Specification of the {} argument for {} ({}) conflicts with specification of" - " the {} parameter ({}) for its {} ({}); the Function's value will be used.". 
- format(repr(INITIAL_VALUE), self.name, mech_init_val, - repr(INITIALIZER), self.integrator_function.initializer, - repr(INTEGRATOR_FUNCTION), - self.integrator_function.__class__.__name__)) - # Assign function's initializer to Mechanism - self.parameters.initial_value._set(self.integrator_function.initializer, context) - - if hasattr(self.integrator_function, RATE): - fct_rate = np.array(self.integrator_function.rate) - mech_specified = not np.array_equal(mech_rate, np.array(self.class_defaults.integration_rate)) - fct_specified = not np.array_equal(np.array(self.integrator_function.rate), - np.array(self.integrator_function.class_defaults.rate)) - # Mechanism and function rate are not the same - if not np.array_equal(mech_rate, fct_rate): - # If function's rate was not specified, assign Mechanism's value to it - if not fct_specified: - self.integrator_function.parameters.rate._set(rate, context) - # Otherwise, warn and then give precedence to function's value - else: - if mech_specified: - warnings.warn("Specification of the {} argument for {} ({}) conflicts with specification of" - " the {} parameter ({}) for its {} ({}); the Function's value will be used.". - format(repr(INTEGRATION_RATE), self.name, rate, - repr(RATE), self.integrator_function.rate, repr(INTEGRATOR_FUNCTION), - self.integrator_function.__class__.__name__)) - # Assign function's rate to Mechanism - self.parameters.integration_rate._set(self.integrator_function.rate, context) - - # KDM 12/21/18: validating here until a standard scheme is designed, because it's tested for - self._validate_params( - request_set={'integration_rate': self.integrator_function.rate}, - target_set={'integration_rate': self.integrator_function.rate}, - context=context - ) + fct_noise = self.integrator_function.parameters.noise._get(context) + mech_specified = not np.array_equal(mech_noise, np.array(self.class_defaults.noise)) + fct_specified = not np.array_equal(np.array(fct_noise), + np.array(self.integrator_function.class_defaults.noise)) + + # Mechanism and function noise are not the same + if not np.array_equal(mech_noise, np.array(fct_noise)): + # If function's noise was not specified, assign Mechanism's value to it + if not fct_specified: + self.integrator_function.parameters.noise._set(mech_noise, context) + # Otherwise, give precedence to function's value + else: + if mech_specified: + warnings.warn("Specification of the {} argument for {} ({}) conflicts with specification of" + " the {} parameter ({}) for its {} ({}); the Function's value will be used.".
+ format(repr(NOISE), self.name, mech_noise, + repr(NOISE), fct_noise, + repr(INTEGRATOR_FUNCTION), + self.integrator_function.__class__.__name__)) + # Assign function's noise to Mechanism + self.parameters.noise._set(fct_noise, context) + + # KDM 12/21/18: validating here until a standard scheme is designed, because it's tested for + self._validate_params( + request_set={'noise': fct_noise}, + target_set={'noise': fct_noise}, + context=context + ) + + + fct_intlzr = np.array(self.integrator_function.parameters.initializer._get(context)) + # Check against variable, as class.default is None, but initial_value assigned to variable before here + mech_specified = not np.array_equal(mech_init_val, np.array(self.defaults.variable)) + fct_specified = not np.array_equal(np.array(fct_intlzr), + np.array(self.integrator_function.class_defaults.initializer)) + + # Mechanism initial_value and function initializer are not the same + if not np.array_equal(mech_init_val, fct_intlzr): + # If function's initializer was not specified, assign Mechanism's initial_value to it + if not fct_specified: + self.integrator_function.parameters.initializer._set(initializer, context) + self.integrator_function._initialize_previous_value(initializer, context) + # Otherwise, give precedence to function's value + else: + if mech_specified: + warnings.warn("Specification of the {} argument for {} ({}) conflicts with specification of" + " the {} parameter ({}) for its {} ({}); the Function's value will be used.". + format(repr(INITIAL_VALUE), self.name, mech_init_val, + repr(INITIALIZER), fct_intlzr, + repr(INTEGRATOR_FUNCTION), + self.integrator_function.__class__.__name__)) + # Assign function's initializer to Mechanism + self.parameters.initial_value._set(fct_intlzr, context) + + fct_rate = np.array(self.integrator_function.parameters.rate._get(context)) + mech_specified = not np.array_equal(mech_rate, np.array(self.class_defaults.integration_rate)) + fct_specified = not np.array_equal(np.array(fct_rate), + np.array(self.integrator_function.class_defaults.rate)) + # Mechanism and function rate are not the same + if not np.array_equal(mech_rate, fct_rate): + # If function's rate was not specified, assign Mechanism's value to it + if not fct_specified: + self.integrator_function.parameters.rate._set(rate, context) + # Otherwise, warn and then give precedence to function's value + else: + if mech_specified: + warnings.warn("Specification of the {} argument for {} ({}) conflicts with specification of" + " the {} parameter ({}) for its {} ({}); the Function's value will be used.". + format(repr(INTEGRATION_RATE), self.name, rate, + repr(RATE), fct_rate, repr(INTEGRATOR_FUNCTION), + self.integrator_function.__class__.__name__)) + # Assign function's rate to Mechanism + self.parameters.integration_rate._set(fct_rate, context) + + # KDM 12/21/18: validating here until a standard scheme is designed, because it's tested for + self._validate_params( + request_set={'integration_rate': fct_rate}, + target_set={'integration_rate': fct_rate}, + context=context + ) # MODIFIED 6/24/19 NEW: # Insure that integrator_function's variable and value have same shape as TransferMechanism's variable @@ -1537,9 +1535,9 @@ def _get_integrated_function_input(self, function_variable, initial_value, noise context=context) # Update param assignments with ones determined to be relevant (mech vs.
fct) # and assigned to integrator_function in _instantiate_integrator_function - initial_value = self.integrator_function.initializer - integration_rate = self.integrator_function.rate - noise = self.integrator_function.noise + initial_value = self.integrator_function.parameters.initializer._get(context) + integration_rate = self.integrator_function.parameters.rate._get(context) + noise = self.integrator_function.parameters.noise._get(context) self._needs_integrator_function_init = False current_input = self.integrator_function.execute(function_variable, diff --git a/psyneulink/core/components/ports/inputport.py b/psyneulink/core/components/ports/inputport.py index d2d6e54c555..16450ba82ef 100644 --- a/psyneulink/core/components/ports/inputport.py +++ b/psyneulink/core/components/ports/inputport.py @@ -1373,7 +1373,7 @@ def _instantiate_input_ports(owner, input_ports=None, reference_value=None, cont variable_item_is_OK = False for i, input_port in enumerate(owner.input_ports): try: - variable_item_is_OK = iscompatible(owner.defaults.variable[i], input_port.value) + variable_item_is_OK = iscompatible(owner.defaults.variable[i], input_port.defaults.value) if not variable_item_is_OK: break except IndexError: @@ -1382,7 +1382,7 @@ def _instantiate_input_ports(owner, input_ports=None, reference_value=None, cont if not variable_item_is_OK: old_variable = owner.defaults.variable - owner.defaults.variable = owner._handle_default_variable(default_variable=[port.value + owner.defaults.variable = owner._handle_default_variable(default_variable=[port.defaults.value for port in owner.input_ports]) if owner.verbosePref: diff --git a/psyneulink/core/components/ports/outputport.py b/psyneulink/core/components/ports/outputport.py index 691956607ae..39a07b2020f 100644 --- a/psyneulink/core/components/ports/outputport.py +++ b/psyneulink/core/components/ports/outputport.py @@ -1201,7 +1201,7 @@ def _get_port_function_value(owner, function, variable): is_PARAMS_DICT = False if fct_variable is None: try: - if owner.value is not None: + if owner.defaults.value is not None: fct_variable = owner.defaults.value[0] # Get owner's value by calling its function else: @@ -1394,7 +1394,7 @@ def _instantiate_output_ports(owner, output_ports=None, context=None): for item in owner_value))): pass else: - converted_to_2d = convert_to_np_array(owner.value, dimension=2) + converted_to_2d = convert_to_np_array(owner.defaults.value, dimension=2) # If owner_value is a list of heterogenous elements, use as is if converted_to_2d.dtype == object: owner_value = owner.defaults.value @@ -1427,11 +1427,11 @@ def _instantiate_output_ports(owner, output_ports=None, context=None): except AttributeError: index = output_port.index output_port_value = owner_value[index] - elif output_port.value is None: + elif output_port.defaults.value is None: output_port_value = output_port.function() else: - output_port_value = output_port.value + output_port_value = output_port.defaults.value else: # parse output_port diff --git a/psyneulink/core/components/ports/port.py b/psyneulink/core/components/ports/port.py index 4b3dafb036a..29f5290f11d 100644 --- a/psyneulink/core/components/ports/port.py +++ b/psyneulink/core/components/ports/port.py @@ -2602,9 +2602,9 @@ def _instantiate_port(port_type:_is_port_class, # Port's type # FIX: THIS SHOULD ONLY APPLY TO InputPort AND ParameterPort; WHAT ABOUT OutputPort? 
# Port's assigned value is incompatible with its reference_value (presumably its owner Mechanism's variable) reference_value = reference_value if reference_value is not None else port.reference_value - if not iscompatible(port.value, reference_value): + if not iscompatible(port.defaults.value, reference_value): raise PortError("{}'s value attribute ({}) is incompatible with the {} ({}) of its owner ({})". - format(port.name, port.value, REFERENCE_VALUE, reference_value, owner.name)) + format(port.name, port.defaults.value, REFERENCE_VALUE, reference_value, owner.name)) # Port has already been assigned to an owner if port.owner is not None and port.owner is not owner: diff --git a/psyneulink/core/components/projections/modulatory/learningprojection.py b/psyneulink/core/components/projections/modulatory/learningprojection.py index 00c5e35764a..ef44f6a0aa6 100644 --- a/psyneulink/core/components/projections/modulatory/learningprojection.py +++ b/psyneulink/core/components/projections/modulatory/learningprojection.py @@ -474,8 +474,8 @@ def __init__(self, # replaces similar code in _instantiate_sender try: - if sender.owner.learning_rate is not None: - learning_rate = sender.owner.learning_rate + if sender.owner.defaults.learning_rate is not None: + learning_rate = sender.owner.defaults.learning_rate except AttributeError: pass diff --git a/psyneulink/core/components/projections/pathway/mappingprojection.py b/psyneulink/core/components/projections/pathway/mappingprojection.py index bd8027fdc7d..39a7e322708 100644 --- a/psyneulink/core/components/projections/pathway/mappingprojection.py +++ b/psyneulink/core/components/projections/pathway/mappingprojection.py @@ -606,7 +606,10 @@ def _instantiate_receiver(self, context=None): receiver_len, self.receiver.owner.name)) - self.matrix = get_matrix(matrix_spec, mapping_input_len, receiver_len, context=context) + self.parameters.matrix._set( + get_matrix(matrix_spec, mapping_input_len, receiver_len, context=context), + context + ) # Since matrix shape has changed, output of self.function may have changed, so update value self._instantiate_value(context=context) diff --git a/psyneulink/core/globals/parameters.py b/psyneulink/core/globals/parameters.py index ae3a02fc7bd..1bb64b2a86f 100644 --- a/psyneulink/core/globals/parameters.py +++ b/psyneulink/core/globals/parameters.py @@ -272,6 +272,7 @@ def _recurrent_transfer_mechanism_matrix_setter(value, owning_component=None, co import collections import copy +import itertools import logging import types import typing @@ -415,6 +416,9 @@ def __del__(self): except (AttributeError, KeyError): pass + def __contains__(self, item): + return item in itertools.chain.from_iterable(self.values(show_all=True).items()) + def __iter__(self): return iter([getattr(self, k) for k in self.values(show_all=True).keys()]) diff --git a/psyneulink/core/globals/sampleiterator.py b/psyneulink/core/globals/sampleiterator.py index f2bef2d81f2..9d12965a231 100644 --- a/psyneulink/core/globals/sampleiterator.py +++ b/psyneulink/core/globals/sampleiterator.py @@ -262,9 +262,8 @@ class SampleIterator(Iterator): """ - @tc.typecheck def __init__(self, - specification:tc.any(*allowable_specs)): + specification): """ Arguments @@ -315,6 +314,9 @@ def __init__(self, specification = SampleSpec(function=specification) + elif isinstance(specification, np.ndarray): + specification = specification.tolist() + if isinstance(specification, list): self.start = specification[0] self.stop = None diff --git a/psyneulink/core/llvm/execution.py 
b/psyneulink/core/llvm/execution.py index 7f3b0ab2ff7..746cc3321b3 100644 --- a/psyneulink/core/llvm/execution.py +++ b/psyneulink/core/llvm/execution.py @@ -368,7 +368,7 @@ def _copy_params_to_pnl(self, context=None, component=None, params=None): if attribute == 'matrix': # special case since we have to unflatten matrix # FIXME: this seems to break something when generalized for all attributes - value = np.array(value).reshape(component.matrix.shape) + value = np.array(value).reshape(component.parameters.matrix._get(context).shape) to_set._set(value, context=context) @property diff --git a/psyneulink/library/components/mechanisms/modulatory/control/agt/lccontrolmechanism.py b/psyneulink/library/components/mechanisms/modulatory/control/agt/lccontrolmechanism.py index 262e07b14c8..749412881c0 100644 --- a/psyneulink/library/components/mechanisms/modulatory/control/agt/lccontrolmechanism.py +++ b/psyneulink/library/components/mechanisms/modulatory/control/agt/lccontrolmechanism.py @@ -662,7 +662,7 @@ class Parameters(ControlMechanism.Parameters): base_level_gain = Parameter(0.5, modulable=True) scaling_factor_gain = Parameter(3.0, modulable=True) - modulated_mechanisms = None + modulated_mechanisms = Parameter(None, stateful=False, loggable=False) @tc.typecheck def __init__(self, diff --git a/psyneulink/library/components/mechanisms/modulatory/learning/autoassociativelearningmechanism.py b/psyneulink/library/components/mechanisms/modulatory/learning/autoassociativelearningmechanism.py index 85410aa43e2..32114d60536 100644 --- a/psyneulink/library/components/mechanisms/modulatory/learning/autoassociativelearningmechanism.py +++ b/psyneulink/library/components/mechanisms/modulatory/learning/autoassociativelearningmechanism.py @@ -315,12 +315,11 @@ class Parameters(LearningMechanism.Parameters): read_only=True, structural=True, ) + learning_type = LearningType.UNSUPERVISED + learning_timing = LearningTiming.EXECUTION_PHASE classPreferenceLevel = PreferenceLevel.TYPE - learning_type = LearningType.UNSUPERVISED - learning_timing = LearningTiming.EXECUTION_PHASE - @tc.typecheck def __init__(self, default_variable:tc.any(list, np.ndarray), @@ -362,20 +361,6 @@ def __init__(self, def _parse_function_variable(self, variable, context=None): return variable - def _instantiate_attributes_after_function(self, context=None): - super(AutoAssociativeLearningMechanism, self)._instantiate_attributes_after_function(context=context) - # KAM 2/27/19 added the line below to set the learning rate of the hebbian learning function to the learning - # rate value passed into RecurrentTransfermechanism - if self.learning_rate: - self.function.learning_rate = self.learning_rate - - def _instantiate_attributes_after_function(self, context=None): - super(AutoAssociativeLearningMechanism, self)._instantiate_attributes_after_function(context=context) - # KAM 2/27/19 added the line below to set the learning rate of the hebbian learning function to the learning - # rate value passed into RecurrentTransfermechanism - if self.learning_rate: - self.function.learning_rate = self.learning_rate - def _validate_variable(self, variable, context=None): """Validate that variable has only one item: activation_input. 
""" diff --git a/psyneulink/library/components/mechanisms/modulatory/learning/kohonenlearningmechanism.py b/psyneulink/library/components/mechanisms/modulatory/learning/kohonenlearningmechanism.py index dea5294a497..c3a2663da9c 100644 --- a/psyneulink/library/components/mechanisms/modulatory/learning/kohonenlearningmechanism.py +++ b/psyneulink/library/components/mechanisms/modulatory/learning/kohonenlearningmechanism.py @@ -276,9 +276,6 @@ class KohonenLearningMechanism(LearningMechanism): classPreferenceLevel = PreferenceLevel.TYPE - learning_type = LearningType.UNSUPERVISED - learning_timing = LearningTiming.EXECUTION_PHASE - class Parameters(LearningMechanism.Parameters): """ Attributes diff --git a/psyneulink/library/components/mechanisms/processing/transfer/contrastivehebbianmechanism.py b/psyneulink/library/components/mechanisms/processing/transfer/contrastivehebbianmechanism.py index 2a865770ee0..8661618d036 100644 --- a/psyneulink/library/components/mechanisms/processing/transfer/contrastivehebbianmechanism.py +++ b/psyneulink/library/components/mechanisms/processing/transfer/contrastivehebbianmechanism.py @@ -1235,10 +1235,10 @@ def _execute(self, # both the integrator_function's previous_value # and the Mechanism's current activity (which is returned as its input) if not self.continuous and self.parameters.integrator_mode._get(context): - self.reset(self.initial_value, context=context) + self.reset(self.parameters.initial_value._get(context), context=context) self.parameters.current_activity._set(self.parameters.initial_value._get(context), context) - self.parameters.current_termination_threshold._set(self.plus_phase_termination_threshold, context) - self.parameters.current_termination_condition._set(self.plus_phase_termination_condition, context) + self.parameters.current_termination_threshold._set(self.parameters.plus_phase_termination_threshold._get(context), context) + self.parameters.current_termination_condition._set(self.parameters.plus_phase_termination_condition._get(context), context) # Switch execution_phase self.parameters.execution_phase._set(not self.parameters.execution_phase._get(context), context) @@ -1299,20 +1299,21 @@ def delta(self, value=NotImplemented, context=None): @handle_external_context() def is_converged(self, value=NotImplemented, context=None): + phase_convergence_threshold = self.parameters.phase_convergence_threshold._get(context) # Check for convergence if ( - self.phase_convergence_threshold is not None + phase_convergence_threshold is not None and self.parameters.value.get_previous(context) is not None and self.initialization_status != ContextFlags.INITIALIZING ): - if self.delta(value, context) <= self.phase_convergence_threshold: + if self.delta(value, context) <= phase_convergence_threshold: return True elif self.get_current_execution_time(context).pass_ >= self.max_passes: phase_str = repr('PLUS_PHASE') if self.parameters.execution_phase._get(context) == PLUS_PHASE \ else repr('MINUS_PHASE') raise ContrastiveHebbianError(f"Maximum number of executions ({self.max_passes}) has occurred " f"before reaching convergence_threshold " - f"({self.phase_convergence_threshold}) for {self.name} in " + f"({phase_convergence_threshold}) for {self.name} in " f"{phase_str} of trial {self.get_current_execution_time(context).trial} " f"of run {self.get_current_execution_time(context).run}.") else: diff --git a/psyneulink/library/components/mechanisms/processing/transfer/kohonenmechanism.py 
b/psyneulink/library/components/mechanisms/processing/transfer/kohonenmechanism.py index 24e543224f0..3fa4891db09 100644 --- a/psyneulink/library/components/mechanisms/processing/transfer/kohonenmechanism.py +++ b/psyneulink/library/components/mechanisms/processing/transfer/kohonenmechanism.py @@ -397,7 +397,7 @@ def configure_learning(self, self._learning_enable_deferred = True return - self.matrix = self.learned_projection.parameter_ports[MATRIX] + self.parameters.matrix._set(self.learned_projection.parameter_ports[MATRIX], context) self.learning_mechanism = self._instantiate_learning_mechanism(learning_function=self.learning_function, learning_rate=self.learning_rate, diff --git a/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py b/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py index 58d5e24c0be..a498ed5cf49 100644 --- a/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py +++ b/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py @@ -180,6 +180,7 @@ """ +import copy import itertools import numbers import numpy as np @@ -612,7 +613,7 @@ class Parameters(TransferMechanism.Parameters): matrix = Parameter(HOLLOW_MATRIX, modulable=True, getter=_recurrent_transfer_mechanism_matrix_getter, setter=_recurrent_transfer_mechanism_matrix_setter) auto = Parameter(1, modulable=True) hetero = Parameter(0, modulable=True) - combination_function = LinearCombination + combination_function = Parameter(LinearCombination, stateful=False, loggable=False) smoothing_factor = Parameter(0.5, modulable=True) enable_learning = False # learning_function is a reference because it is used for @@ -851,7 +852,7 @@ def _instantiate_attributes_before_function(self, function=None, context=None): raise RecurrentTransferError("Matrix parameter ({}) for {} failed to produce a suitable matrix: " "if the matrix parameter does not produce a suitable matrix, the " "'auto' and 'hetero' parameters must be specified; currently, either" - "auto or hetero parameter is missing.".format(self.matrix, self)) + "auto or hetero parameter is missing.".format(self.parameters.matrix._get(context), self)) if AUTO not in param_keys and HETERO in param_keys: d = np.diagonal(matrix).copy() @@ -862,7 +863,7 @@ def _instantiate_attributes_before_function(self, function=None, context=None): reference_value_name=AUTO, params=None, context=context) - self.auto = d + self.parameters.auto._set(d, context) if port is not None: self._parameter_ports[AUTO] = port port.source = self.parameters.auto @@ -873,7 +874,7 @@ def _instantiate_attributes_before_function(self, function=None, context=None): m = matrix.copy() np.fill_diagonal(m, 0.0) - self.hetero = m + self.parameters.hetero._set(m, context) port = _instantiate_port(owner=self, port_type=ParameterPort, name=HETERO, @@ -938,9 +939,9 @@ def _instantiate_attributes_before_function(self, function=None, context=None): else: self.combination_function = comb_fct - if self.auto is None and self.hetero is None: - self.matrix = matrix - if self.matrix is None: + if self.parameters.auto._get(context) is None and self.parameters.hetero._get(context) is None: + self.parameters.matrix._set(matrix, context) + if self.parameters.matrix._get(context) is None: raise RecurrentTransferError("PROGRAM ERROR: Failed to instantiate \'matrix\' param for {}". 
diff --git a/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py b/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py
index 58d5e24c0be..a498ed5cf49 100644
--- a/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py
+++ b/psyneulink/library/components/mechanisms/processing/transfer/recurrenttransfermechanism.py
@@ -180,6 +180,7 @@
 
 """
 
+import copy
 import itertools
 import numbers
 
 import numpy as np
@@ -612,7 +613,7 @@ class Parameters(TransferMechanism.Parameters):
         matrix = Parameter(HOLLOW_MATRIX, modulable=True, getter=_recurrent_transfer_mechanism_matrix_getter, setter=_recurrent_transfer_mechanism_matrix_setter)
         auto = Parameter(1, modulable=True)
         hetero = Parameter(0, modulable=True)
-        combination_function = LinearCombination
+        combination_function = Parameter(LinearCombination, stateful=False, loggable=False)
         smoothing_factor = Parameter(0.5, modulable=True)
         enable_learning = False
         # learning_function is a reference because it is used for
@@ -851,7 +852,7 @@ def _instantiate_attributes_before_function(self, function=None, context=None):
                 raise RecurrentTransferError("Matrix parameter ({}) for {} failed to produce a suitable matrix: "
                                              "if the matrix parameter does not produce a suitable matrix, the "
                                              "'auto' and 'hetero' parameters must be specified; currently, either"
-                                             "auto or hetero parameter is missing.".format(self.matrix, self))
+                                             " auto or hetero parameter is missing.".format(self.parameters.matrix._get(context), self))
 
         if AUTO not in param_keys and HETERO in param_keys:
             d = np.diagonal(matrix).copy()
@@ -862,7 +863,7 @@ def _instantiate_attributes_before_function(self, function=None, context=None):
                                      reference_value_name=AUTO,
                                      params=None,
                                      context=context)
-            self.auto = d
+            self.parameters.auto._set(d, context)
             if port is not None:
                 self._parameter_ports[AUTO] = port
                 port.source = self.parameters.auto
@@ -873,7 +874,7 @@ def _instantiate_attributes_before_function(self, function=None, context=None):
             m = matrix.copy()
             np.fill_diagonal(m, 0.0)
-            self.hetero = m
+            self.parameters.hetero._set(m, context)
             port = _instantiate_port(owner=self,
                                      port_type=ParameterPort,
                                      name=HETERO,
@@ -938,9 +939,9 @@ def _instantiate_attributes_before_function(self, function=None, context=None):
             else:
                 self.combination_function = comb_fct
 
-        if self.auto is None and self.hetero is None:
-            self.matrix = matrix
-            if self.matrix is None:
+        if self.parameters.auto._get(context) is None and self.parameters.hetero._get(context) is None:
+            self.parameters.matrix._set(matrix, context)
+            if self.parameters.matrix._get(context) is None:
                 raise RecurrentTransferError("PROGRAM ERROR: Failed to instantiate \'matrix\' param for {}".
                                              format(self.__class__.__name__))
@@ -951,14 +952,15 @@ def _instantiate_attributes_after_function(self, context=None):
 
         super()._instantiate_attributes_after_function(context=context)
 
+        matrix = self.parameters.matrix._get(context)
         # (7/19/17 CW) this line of code is now questionable, given the changes to matrix and the recurrent projection
-        if isinstance(self.matrix, AutoAssociativeProjection):
-            self.recurrent_projection = self.matrix
+        if isinstance(matrix, AutoAssociativeProjection):
+            self.recurrent_projection = matrix
 
         # IMPLEMENTATION NOTE: THESE SHOULD BE MOVED TO COMPOSITION WHEN THAT IS IMPLEMENTED
         else:
             self.recurrent_projection = self._instantiate_recurrent_projection(self,
-                                                                               matrix=self.matrix,
+                                                                               matrix=matrix,
                                                                                context=context)
 
         # creating a recurrent_projection changes the default variable shape
@@ -1113,7 +1115,7 @@ def _instantiate_learning_mechanism(self,
                                         matrix,
                                         context=None):
 
-        learning_mechanism = AutoAssociativeLearningMechanism(default_variable=[activity_vector.value],
+        learning_mechanism = AutoAssociativeLearningMechanism(default_variable=copy.deepcopy([activity_vector.defaults.value]),
                                                               # learning_signals=[self.recurrent_projection],
                                                               function=learning_function,
                                                               learning_rate=learning_rate,
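Two recurring fixes in the recurrenttransfermechanism diff: class attributes that should be Parameters are declared explicitly with Parameter(...) (here stateful=False, loggable=False, since a combination function does not vary by execution context), and a mutable defaults value is deep-copied before being handed to another constructor so the two components do not alias the same list. A sketch of both, assuming the usual Parameter import from psyneulink.core.globals.parameters:

    import copy
    from psyneulink.core.globals.parameters import Parameter

    class Parameters(TransferMechanism.Parameters):
        # non-stateful: one value for all contexts, and not logged
        combination_function = Parameter(LinearCombination, stateful=False, loggable=False)

    # Deep copy so the learning mechanism cannot mutate the donor's default in place:
    default_variable = copy.deepcopy([activity_vector.defaults.value])
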
diff --git a/psyneulink/library/compositions/pytorchllvmhelper.py b/psyneulink/library/compositions/pytorchllvmhelper.py
index 97856c497df..6a6bafccd26 100644
--- a/psyneulink/library/compositions/pytorchllvmhelper.py
+++ b/psyneulink/library/compositions/pytorchllvmhelper.py
@@ -1,8 +1,6 @@
 from psyneulink.core import llvm as pnlvm
 
-__all__ = ["gen_inject_unary_function_call",
-           "gen_inject_vec_copy",
-           "gen_inject_vec_binop",
+__all__ = ["gen_inject_vec_binop",
            "gen_inject_vec_add",
            "gen_inject_vec_sub",
            "gen_inject_vec_hadamard",
@@ -14,38 +12,11 @@
            "gen_inject_vxm",
            "gen_inject_vxm_transposed"]
 
-def gen_inject_unary_function_call(ctx, builder, unary_func, vector, output_vec=None):
-    dim = len(vector.type.pointee)
-    if output_vec is None:
-        output_vec = builder.alloca(pnlvm.ir.types.ArrayType(ctx.float_ty, dim))
-    assert len(output_vec.type.pointee) == dim
-
-    # Get the pointer to the first element of the array to convert from [? x double]* -> double*
-    vec_in = builder.gep(vector, [ctx.int32_ty(0), ctx.int32_ty(0)])
-    vec_out = builder.gep(output_vec, [ctx.int32_ty(0), ctx.int32_ty(0)])
-
-    builder.call(unary_func, [vec_in, ctx.int32_ty(dim), vec_out])
-    return output_vec
-
-def gen_inject_vec_copy(ctx, builder, vector, output_vec=None):
-    dim = len(vector.type.pointee)
-    if output_vec is None:
-        output_vec = builder.alloca(pnlvm.ir.types.ArrayType(ctx.float_ty, dim))
-    assert len(output_vec.type.pointee) == dim
-
-    # Get the pointer to the first element of the array to convert from [? x double]* -> double*
-    vec_in = builder.gep(vector, [ctx.int32_ty(0), ctx.int32_ty(0)])
-    vec_out = builder.gep(output_vec, [ctx.int32_ty(0), ctx.int32_ty(0)])
-
-    builtin = ctx.import_llvm_function("__pnl_builtin_vec_copy")
-    builder.call(builtin, [vec_in, ctx.int32_ty(dim), vec_out])
-    return output_vec
-
 def gen_inject_vec_binop(ctx, builder, op, u, v, output_vec=None):
     dim = len(u.type.pointee)
     assert len(v.type.pointee) == dim
     if output_vec is None:
-        output_vec = builder.alloca(pnlvm.ir.types.ArrayType(ctx.float_ty, dim))
+        output_vec = builder.alloca(u.type.pointee)
     assert len(output_vec.type.pointee) == dim
 
     # Get the pointer to the first element of the array to convert from [? x double]* -> double*
@@ -71,17 +42,20 @@ def gen_inject_mat_binop(ctx, builder, op, m1, m2, output_mat=None):
     assert len(m2.type.pointee) == x and len(m2.type.pointee.element) == y
 
     if output_mat is None:
-        output_mat = builder.alloca(
-            pnlvm.ir.types.ArrayType(
-                pnlvm.ir.types.ArrayType(ctx.float_ty, y), x))
+        output_mat = builder.alloca(m1.type.pointee)
     assert len(output_mat.type.pointee) == x
     assert len(output_mat.type.pointee.element) == y
 
+    m1_ptr = builder.gep(m1, [ctx.int32_ty(0), ctx.int32_ty(0), ctx.int32_ty(0)])
+    m2_ptr = builder.gep(m2, [ctx.int32_ty(0), ctx.int32_ty(0), ctx.int32_ty(0)])
+    output_ptr = builder.gep(output_mat, [ctx.int32_ty(0), ctx.int32_ty(0), ctx.int32_ty(0)])
+
     builtin = ctx.import_llvm_function(op)
-    builder.call(builtin, [builder.bitcast(m1, ctx.float_ty.as_pointer()),
-                           builder.bitcast(m2, ctx.float_ty.as_pointer()),
-                           ctx.int32_ty(x), ctx.int32_ty(y),
-                           builder.bitcast(output_mat, ctx.float_ty.as_pointer())])
+    builder.call(builtin, [m1_ptr,
+                           m2_ptr,
+                           ctx.int32_ty(x),
+                           ctx.int32_ty(y),
+                           output_ptr])
     return output_mat
 
 def gen_inject_mat_add(ctx, builder, m1, m2, output_mat=None):
@@ -96,51 +70,67 @@ def gen_inject_mat_hadamard(ctx, builder, m1, m2, output_mat=None):
 def gen_inject_mat_scalar_mult(ctx, builder, m1, s, output_mat=None):
     x = len(m1.type.pointee)
     y = len(m1.type.pointee.element)
+
     if output_mat is None:
         output_mat = builder.alloca(
             pnlvm.ir.types.ArrayType(
                 pnlvm.ir.types.ArrayType(ctx.float_ty, y), x))
+
     assert len(output_mat.type.pointee) == x
     assert len(output_mat.type.pointee.element) == y
 
+    m1_ptr = builder.gep(m1, [ctx.int32_ty(0), ctx.int32_ty(0), ctx.int32_ty(0)])
+    output_ptr = builder.gep(output_mat, [ctx.int32_ty(0), ctx.int32_ty(0), ctx.int32_ty(0)])
+
     builtin = ctx.import_llvm_function("__pnl_builtin_mat_scalar_mult")
-    builder.call(builtin, [builder.bitcast(m1, ctx.float_ty.as_pointer()),
-                           s, ctx.int32_ty(x), ctx.int32_ty(y),
-                           builder.bitcast(output_mat, ctx.float_ty.as_pointer())])
+    builder.call(builtin, [m1_ptr,
+                           s,
+                           ctx.int32_ty(x),
+                           ctx.int32_ty(y),
+                           output_ptr])
     return output_mat
 
-def gen_inject_vxm(ctx, builder, m1, m2, output_vec=None):
+def gen_inject_vxm(ctx, builder, v, m2, output_vec=None):
     y = len(m2.type.pointee)
     z = len(m2.type.pointee.element)
-    assert len(m1.type.pointee) == y
+    assert len(v.type.pointee) == y
+
     # create output vec
     if output_vec is None:
         output_vec = builder.alloca(pnlvm.ir.types.ArrayType(ctx.float_ty, z))
     assert len(output_vec.type.pointee) == z
 
     # Get the pointer to the first element of the array to convert from [? x double]* -> double*
-    v = builder.gep(m1, [ctx.int32_ty(0), ctx.int32_ty(0)])
-    out = builder.gep(output_vec, [ctx.int32_ty(0), ctx.int32_ty(0)])
+    v_ptr = builder.gep(v, [ctx.int32_ty(0), ctx.int32_ty(0)])
+    m2_ptr = builder.gep(m2, [ctx.int32_ty(0), ctx.int32_ty(0), ctx.int32_ty(0)])
+    output_ptr = builder.gep(output_vec, [ctx.int32_ty(0), ctx.int32_ty(0)])
 
     builtin = ctx.import_llvm_function("__pnl_builtin_vxm")
-    builder.call(builtin, [v, builder.bitcast(m2, ctx.float_ty.as_pointer()),
-                           ctx.int32_ty(y), ctx.int32_ty(z), out])
+    builder.call(builtin, [v_ptr,
+                           m2_ptr,
+                           ctx.int32_ty(y),
+                           ctx.int32_ty(z),
+                           output_ptr])
     return output_vec
 
-def gen_inject_vxm_transposed(ctx, builder, m1, m2, output_vec=None):
+def gen_inject_vxm_transposed(ctx, builder, v, m2, output_vec=None):
     y = len(m2.type.pointee)
     z = len(m2.type.pointee.element)
-    assert len(m1.type.pointee) == z
+    assert len(v.type.pointee) == z
 
     # create output vec
     if output_vec is None:
         output_vec = builder.alloca(pnlvm.ir.types.ArrayType(ctx.float_ty, y))
     assert len(output_vec.type.pointee) == y
 
     # Get the pointer to the first element of the array to convert from [? x double]* -> double*
-    v = builder.gep(m1, [ctx.int32_ty(0), ctx.int32_ty(0)])
-    out = builder.gep(output_vec, [ctx.int32_ty(0), ctx.int32_ty(0)])
+    v_ptr = builder.gep(v, [ctx.int32_ty(0), ctx.int32_ty(0)])
+    m2_ptr = builder.gep(m2, [ctx.int32_ty(0), ctx.int32_ty(0), ctx.int32_ty(0)])
+    output_ptr = builder.gep(output_vec, [ctx.int32_ty(0), ctx.int32_ty(0)])
 
     builtin = ctx.import_llvm_function("__pnl_builtin_vxm_transposed")
-    builder.call(builtin, [v, builder.bitcast(m2, ctx.float_ty.as_pointer()),
-                           ctx.int32_ty(y), ctx.int32_ty(z), out])
+    builder.call(builtin, [v_ptr,
+                           m2_ptr,
+                           ctx.int32_ty(y),
+                           ctx.int32_ty(z),
+                           output_ptr])
     return output_vec
diff --git a/psyneulink/library/compositions/pytorchmodelcreator.py b/psyneulink/library/compositions/pytorchmodelcreator.py
index 5f5a7fb872a..6307afdbda9 100644
--- a/psyneulink/library/compositions/pytorchmodelcreator.py
+++ b/psyneulink/library/compositions/pytorchmodelcreator.py
@@ -67,14 +67,6 @@ def __init__(self, composition, device, context=None):
         # 3) Remove empty execution sets
         self.execution_sets = [x for x in self.execution_sets if len(x) > 0]
 
-    # gets the index of 'afferent_node' in the forward info weights list
-    def _get_afferent_node_index(self, node, afferent_node):
-        return [proj.receiver for proj in node.afferents].index(self.component_map[afferent_node])
-
-    def _get_afferent_nodes(self, node):
-        forward_info_weights = self.component_map[node].afferents
-        return [(vertex.component, weights) for (vertex, weights) in forward_info_weights.items()]
-
     # generates llvm function for self.forward
     def _gen_llvm_function(self, *, ctx:pnlvm.LLVMBuilderContext, tags:frozenset):
         args = [ctx.get_state_struct_type(self._composition).as_pointer(),
@@ -327,14 +319,6 @@ def copy_weights_to_psyneulink(self, context=None):
                 projection.parameter_ports['matrix'].parameters.value._set(
                     pytorch_rep.matrix.detach().cpu().numpy(), context)
 
-    def copy_outputs_to_psyneulink(self, outputs, context=None):
-        for component, value in outputs.items():
-            detached_value = value.detach().cpu().numpy()
-            component.parameters.value._set(
-                detached_value, context, skip_history=True, skip_log=True)
-            component.output_port.parameters.value._set(
-                detached_value, context, skip_history=True, skip_log=True)
-
     def log_weights(self):
         for proj in self.projections:
             proj.log_matrix()
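The pytorchllvmhelper rewrite replaces bitcast-to-double* with explicit GEPs to element 0: a 1-d [n x double]* needs two zero indices to reach a double*, a 2-d [x x [y x double]]* needs three, so a mis-shaped argument now fails at IR construction instead of being silently reinterpreted. A self-contained llvmlite sketch of the idiom (illustrative names, not part of the patch):

    from llvmlite import ir

    double = ir.DoubleType()
    zero = ir.Constant(ir.IntType(32), 0)
    mat_ty = ir.ArrayType(ir.ArrayType(double, 3), 2)      # [2 x [3 x double]]

    module = ir.Module()
    fn = ir.Function(module, ir.FunctionType(ir.VoidType(), []), name="demo")
    builder = ir.IRBuilder(fn.append_basic_block("entry"))

    mat = builder.alloca(mat_ty)                           # [2 x [3 x double]]*
    # Three indices: through the pointer, the outer array, the inner array -> double*
    mat_ptr = builder.gep(mat, [zero, zero, zero])
    builder.ret_void()
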
diff --git a/tests/composition/test_control.py b/tests/composition/test_control.py
index 0ae5c7f7b4b..0b2c4339409 100644
--- a/tests/composition/test_control.py
+++ b/tests/composition/test_control.py
@@ -355,12 +355,15 @@ def test_agent_rep_assignement_as_controller_and_replacement(self):
         comp = pnl.Composition(name='comp',
                                pathways=[mech],
                                controller=pnl.OptimizationControlMechanism(agent_rep=None,
-                                                                           control_signals=(pnl.SLOPE, mech)))
+                                                                           control_signals=(pnl.SLOPE, mech),
+                                                                           search_space=[1]))
         assert comp.controller.composition == comp
         assert any(pnl.SLOPE in p_name for p_name in comp.projections.names)
         assert not any(pnl.INTERCEPT in p_name for p_name in comp.projections.names)
 
-        new_ocm = pnl.OptimizationControlMechanism(agent_rep=None, control_signals=(pnl.INTERCEPT, mech))
+        new_ocm = pnl.OptimizationControlMechanism(agent_rep=None,
+                                                   control_signals=(pnl.INTERCEPT, mech),
+                                                   search_space=[1])
         old_ocm = comp.controller
 
         comp.add_controller(new_ocm)
@@ -1064,6 +1067,89 @@ class TestModelBasedOptimizationControlMechanisms:
 
+    def test_ocm_default_function(self):
+        a = pnl.ProcessingMechanism()
+        comp = pnl.Composition(
+            controller_mode=pnl.BEFORE,
+            nodes=[a],
+            controller=pnl.OptimizationControlMechanism(
+                control=pnl.ControlSignal(
+                    modulates=(pnl.SLOPE, a),
+                    intensity_cost_function=lambda x: 0,
+                    adjustment_cost_function=lambda x: 0,
+                    allocation_samples=[1, 10]
+                ),
+                features=[a.input_port],
+                objective_mechanism=pnl.ObjectiveMechanism(
+                    monitor=[a.output_port]
+                ),
+            )
+        )
+        assert type(comp.controller.function) == pnl.GridSearch
+        assert comp.run([1]) == [10]
+
+    def test_ocm_searchspace_arg(self):
+        a = pnl.ProcessingMechanism()
+        comp = pnl.Composition(
+            controller_mode=pnl.BEFORE,
+            nodes=[a],
+            controller=pnl.OptimizationControlMechanism(
+                control=pnl.ControlSignal(
+                    modulates=(pnl.SLOPE, a),
+                    intensity_cost_function=lambda x: 0,
+                    adjustment_cost_function=lambda x: 0,
+                ),
+                features=[a.input_port],
+                objective_mechanism=pnl.ObjectiveMechanism(
+                    monitor=[a.output_port]
+                ),
+                search_space=[pnl.SampleIterator([1, 10])]
+            )
+        )
+        assert type(comp.controller.function) == pnl.GridSearch
+        assert comp.run([1]) == [10]
+
+    @pytest.mark.parametrize("format,nested",
+                             [("list", True), ("list", False),
+                              ("tuple", True), ("tuple", False),
+                              ("SampleIterator", True), ("SampleIterator", False),
+                              ("SampleSpec", True), ("SampleSpec", False),
+                              ("ndArray", True), ("ndArray", False),
+                              ],)
+    def test_ocm_searchspace_format_equivalence(self, format, nested):
+        if format == "list":
+            search_space = [1, 10]
+        elif format == "tuple":
+            search_space = (1, 10)
+        elif format == "SampleIterator":
+            search_space = SampleIterator((1,10))
+        elif format == "SampleSpec":
+            search_space = SampleSpec(1, 10, 9)
+        elif format == "ndArray":
+            search_space = np.array((1, 10))
+
+        if nested:
+            search_space = [search_space]
+
+        a = pnl.ProcessingMechanism()
+        comp = pnl.Composition(
+            controller_mode=pnl.BEFORE,
+            nodes=[a],
+            controller=pnl.OptimizationControlMechanism(
+                control=pnl.ControlSignal(
+                    modulates=(pnl.SLOPE, a),
+                    intensity_cost_function=lambda x: 0,
+                    adjustment_cost_function=lambda x: 0,
+                ),
+                features=[a.input_port],
+                objective_mechanism=pnl.ObjectiveMechanism(
+                    monitor=[a.output_port]
+                ),
+                search_space=search_space
+            )
+        )
+        assert type(comp.controller.function) == pnl.GridSearch
+        assert comp.run([1]) == [10]
+
     def test_evc(self):
         # Mechanisms
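The test updates above reflect that an OptimizationControlMechanism whose control signals carry no allocation_samples must be given an explicit search_space (one entry per control signal); with the relaxed validation in optimizationfunctions.py earlier in this patch, each entry may be a SampleIterator, list, tuple, or ndarray. A minimal usage sketch mirroring the tests:

    import psyneulink as pnl

    mech = pnl.ProcessingMechanism()
    ocm = pnl.OptimizationControlMechanism(
        agent_rep=None,
        control_signals=(pnl.SLOPE, mech),
        search_space=[1],              # one entry per control signal
    )
    comp = pnl.Composition(name='comp', pathways=[mech], controller=ocm)
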
diff --git a/tests/composition/test_interfaces.py b/tests/composition/test_interfaces.py
index dfaabe30e20..c4b48f2a410 100644
--- a/tests/composition/test_interfaces.py
+++ b/tests/composition/test_interfaces.py
@@ -558,7 +558,8 @@ def test_parameter_CIM_port_order(self):
                 ControlSignal(projections=[(NOISE, ia)]),
                 ControlSignal(projections=[(INTERCEPT, ia)]),
                 ControlSignal(projections=[(SLOPE, ia)]),
-            ]
+            ],
+            search_space=[[1], [1], [1]]
         )
 
         ocomp.add_controller(ocm)
@@ -598,6 +599,9 @@ def test_nested_control_projection_count_controller(self):
                 ControlSignal(projections=[(NOISE, ia)]),
                 ControlSignal(projections=[(INTERCEPT, ia)]),
                 ControlSignal(projections=[(SLOPE, ia)]),
+            ],
+            search_space=[
+                [1], [1], [1]
             ]
         )
 
         ocomp.add_controller(ocm)
diff --git a/tests/composition/test_show_graph.py b/tests/composition/test_show_graph.py
index 9594c4e3fdb..baf0f184eb4 100644
--- a/tests/composition/test_show_graph.py
+++ b/tests/composition/test_show_graph.py
@@ -187,7 +187,8 @@ def test_no_nested_and_controler_name_with_space_in_it(self):
                 ControlSignal(projections=[(NOISE, ia)]),
                 ControlSignal(projections=[(INTERCEPT, ia)]),
                 ControlSignal(projections=[(SLOPE, ib)])
-            ])
+            ],
+            search_space=[[1],[1],[1]])
         comp = Composition(name='ocomp', pathways=[ia, ib], controller=ocm)
 
         gv = comp.show_graph(show_controller=False, output_fmt='source')
@@ -277,8 +278,9 @@ def test_nested_learning_test_with_user_specified_target_in_outer_composition(self):
                                           name='CONTROLLER',
                                           objective_mechanism=ObjectiveMechanism(name='OBJECTIVE MECHANISM',
                                                                                  monitor=[input_mech, output_mech]),
-                                          control=(SLOPE, internal_mech))
-        )
+                                          control=(SLOPE, internal_mech),
+                                          search_space=[1]
+                                          ))
 
         ocomp.add_node(target)
         ocomp.add_projection(sender=target, receiver=p.target)
@@ -363,7 +365,8 @@ def test_of_show_nested_show_cim_and_show_node_structure(self):
                 ControlSignal(projections=[(NOISE, ia)]),
                 ControlSignal(projections=[(INTERCEPT, ia)]),
                 ControlSignal(projections=[(SLOPE, oa)]),
-            ])
+            ],
+            search_space=[[1],[1],[1]])
         ocomp.add_controller(ocm)
 
         # ocomp.show_graph(show_cim=True, show_nested=INSET)
@@ -414,7 +417,8 @@ def test_of_show_3_level_nested_show_cim_and_show_node_structure(self):
                 ControlSignal(projections=[(NOISE, ia)]),
                 ControlSignal(projections=[(INTERCEPT, ia)]),
                 ControlSignal(projections=[(SLOPE, oa)]),
-            ])
+            ],
+            search_space=[[1],[1],[1]])
         ocomp.add_controller(ocm)
 
         gv = ocomp.show_graph(show_nested=False, output_fmt='source')
@@ -461,7 +465,8 @@ def test_of_show_nested_show_cim_and_show_node_structure_with_singleton_in_outer
                 ControlSignal(projections=[(NOISE, ia)]),
                 ControlSignal(projections=[(INTERCEPT, ia)]),
                 ControlSignal(projections=[(SLOPE, oa)]),
-            ])
+            ],
+            search_space=[[1],[1],[1]])
         ocomp.add_controller(ocm)
 
         ocomp.show_graph(show_cim=True, show_nested=INSET)
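test_ocm_searchspace_format_equivalence asserts that every accepted search_space spelling produces the same GridSearch behavior (each run selects the allocation 10). The interchangeable one-dimensional specifications it exercises, assuming SampleIterator and SampleSpec are the top-level PsyNeuLink exports the test imports:

    import numpy as np
    import psyneulink as pnl

    # Interchangeable specifications of the same search dimension over 1..10:
    equivalent_specs = [
        [1, 10],                        # list
        (1, 10),                        # tuple
        np.array((1, 10)),              # ndarray
        pnl.SampleIterator((1, 10)),    # explicit iterator
        pnl.SampleSpec(1, 10, 9),       # spec covering the same 1..10 range
    ]
    # The nested form simply wraps any of these in an outer list: search_space=[spec]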