`; subclasses may extend this list to include additional ones.
@@ -2390,11 +2406,10 @@ def execute(self,
"""
if self.initialization_status == ContextFlags.INITIALIZED:
- context.string = "{} EXECUTING {}: {}".format(context.source.name,self.name,
- ContextFlags._get_context_string(
- context.flags, EXECUTION_PHASE))
+ context.string = f"{context.source.name} EXECUTING {self.name}: " \
+ f"{ContextFlags._get_context_string(context.flags, EXECUTION_PHASE)}."
else:
- context.string = "{} INITIALIZING {}".format(context.source.name, self.name)
+ context.string = f"{context.source.name} INITIALIZING {self.name}."
if context.source is ContextFlags.COMMAND_LINE:
self._initialize_from_context(context, override=False)
@@ -2480,17 +2495,15 @@ def execute(self,
# Executing or simulating Composition, so get input by updating input_ports
if (input is None
and (context.execution_phase is not ContextFlags.IDLE)
- and (self.input_port.path_afferents != [])):
+ and any(p.path_afferents for p in self.input_ports)):
variable = self._update_input_ports(runtime_port_params[INPUT_PORT_PARAMS], context)
# Direct call to execute Mechanism with specified input, so assign input to Mechanism's input_ports
else:
if context.source & ContextFlags.COMMAND_LINE:
context.execution_phase = ContextFlags.PROCESSING
-
if input is not None:
input = convert_all_elements_to_np_array(input)
-
if input is None:
input = self.defaults.variable
# FIX: this input value is sent to input CIMs when compositions are nested
@@ -2588,6 +2601,15 @@ def execute(self,
return value
def _get_variable_from_input(self, input, context=None):
+ """Return array of results from each InputPort function executed with corresponding input item as its variable
+ This is called when Mechanism is executed on its own (e.g., during init or from the command line).
+ It:
+ - bypasses call to Port._update(), thus ignoring any afferent Projections assigned to the Mechanism;
+ - assigns each item of **input** to variable of corresponding InputPort;
+ - executes function of each InputPort using corresponding item of input as its variable;
+ - returns array of values generated by execution of each InputPort function.
+ """
+
input = convert_to_np_array(input, dimension=2)
num_inputs = np.size(input, 0)
num_input_ports = len(self.input_ports)
@@ -2603,13 +2625,29 @@ def _get_variable_from_input(self, input, context=None):
"its number of input_ports ({2})".
format(num_inputs, self.name, num_input_ports ))
for input_item, input_port in zip(input, self.input_ports):
- if len(input_port.defaults.value) == len(input_item):
- input_port.parameters.value._set(input_item, context)
+ if input_port.default_input_shape.size == np.array(input_item).size:
+ from psyneulink.core.compositions.composition import RunError
+
+ # Assign input_item as input_port.variable
+ input_port.parameters.variable._set(np.atleast_2d(input_item), context)
+
+ # Call input_port._execute with newly assigned variable and assign result to input_port.value
+ base_error_msg = f"Input to '{self.name}' ({input_item}) is incompatible " \
+ f"with its corresponding {InputPort.__name__} ({input_port.full_name})"
+ try:
+ input_port.parameters.value._set(
+ input_port._execute(input_port.parameters.variable.get(context), context),
+ context)
+ except (RunError,TypeError) as error:
+ raise MechanismError(f"{base_error_msg}: '{error.args[0]}.'")
+ except:
+ raise MechanismError(f"{base_error_msg}.")
else:
raise MechanismError(f"Length ({len(input_item)}) of input ({input_item}) does not match "
- f"required length ({len(input_port.defaults.variable)}) for input "
+ f"required length ({input_port.default_input_shape.size}) for input "
f"to {InputPort.__name__} {repr(input_port.name)} of {self.name}.")
+ # Return values of input_ports for use as variable of Mechanism
return convert_to_np_array(self.get_input_values(context))
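For illustration, a minimal sketch of the direct-execution path handled by _get_variable_from_input: calling a Mechanism's execute with an explicit input assigns each input item to the corresponding InputPort and ignores afferent Projections. The mechanism, its size, and the values below are hypothetical, not from this patch.

import psyneulink as pnl

# Hypothetical example; 'my_mech' and its size are illustrative.
mech = pnl.TransferMechanism(name='my_mech', size=3)
# One item per InputPort; each item becomes the variable of the corresponding InputPort.
result = mech.execute(input=[[0.5, 1.0, 1.5]])
print(result)   # Mechanism value computed from the InputPort outputs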
def _update_input_ports(self, runtime_input_port_params=None, context=None):
@@ -2822,7 +2860,11 @@ def _get_output_struct_type(self, ctx):
def _get_input_struct_type(self, ctx):
# Extract the non-modulation portion of InputPort input struct
- input_type_list = [ctx.get_input_struct_type(port).elements[0] for port in self.input_ports]
+ def _get_data_part_of_input_struct(p):
+ struct_ty = ctx.get_input_struct_type(p)
+ return struct_ty.elements[0] if len(p.mod_afferents) > 0 else struct_ty
+
+ input_type_list = [_get_data_part_of_input_struct(port) for port in self.input_ports]
# Get modulatory inputs
@@ -2849,7 +2891,7 @@ def _get_state_initializer(self, context):
return (port_state_init, *mech_state_init)
def _gen_llvm_ports(self, ctx, builder, ports, group,
- get_output_ptr, fill_input_data,
+ get_output_ptr, get_input_data_ptr,
mech_params, mech_state, mech_input):
group_ports = getattr(self, group)
ports_param = pnlvm.helpers.get_param_ptr(builder, self, mech_params, group)
@@ -2859,14 +2901,35 @@ def _gen_llvm_ports(self, ctx, builder, ports, group,
for i, port in enumerate(ports):
p_function = ctx.import_llvm_function(port)
- # Find output location
+ # Find input and output locations
+ builder, p_input_data = get_input_data_ptr(builder, i)
builder, p_output = get_output_ptr(builder, i)
- # Allocate the input structure (data + modulation)
- p_input = builder.alloca(p_function.args[2].type.pointee)
+ if len(port.mod_afferents) == 0:
+ # There's no modulation so the only input is data
+ if p_input_data.type == p_function.args[2].type:
+ p_input = p_input_data
+ else:
+ assert port in self.output_ports
+ # Ports always take at least 2d input. However, parsing
+ # the function result can yield a 1d structure or a scalar.
+ # Casting the pointer is the LLVM way of adding dimensions.
+ array_1d = pnlvm.ir.ArrayType(p_input_data.type.pointee, 1)
+ array_2d = pnlvm.ir.ArrayType(array_1d, 1)
+ assert array_1d == p_function.args[2].type.pointee or array_2d == p_function.args[2].type.pointee, \
+ "{} vs. {}".format(p_function.args[2].type.pointee, p_input_data.type.pointee)
+ p_input = builder.bitcast(p_input_data, p_function.args[2].type)
- # Copy input data to input structure
- builder = fill_input_data(builder, p_input, i)
+ else:
+ # Port input structure is: (data, [modulations]),
+ p_input = builder.alloca(p_function.args[2].type.pointee,
+ name=group + "_port_" + str(i) + "_input")
+ # Fill in the data.
+ # FIXME: We can potentially hit the same dimensionality issue
+ # as above, but it's more difficult to manifest and
+ # not even new tests that modulate output ports hit it.
+ p_data = builder.gep(p_input, [ctx.int32_ty(0), ctx.int32_ty(0)])
+ builder.store(builder.load(p_input_data), p_data)
# Copy mod_afferent inputs
for idx, p_mod in enumerate(port.mod_afferents):
@@ -2903,58 +2966,51 @@ def _gen_llvm_input_ports(self, ctx, builder,
else:
ip_output_type = pnlvm.ir.LiteralStructType(ip_output_list)
- ip_output = builder.alloca(ip_output_type)
+ ip_output = builder.alloca(ip_output_type, name="input_ports_out")
def _get_output_ptr(b, i):
ptr = b.gep(ip_output, [ctx.int32_ty(0), ctx.int32_ty(i)])
return b, ptr
- def _fill_input(b, p_input, i):
- ip_in = builder.gep(mech_input, [ctx.int32_ty(0), ctx.int32_ty(i)])
- # Input port inputs are {original parameter, [modulations]},
- # fill in the first one.
- data_ptr = builder.gep(p_input, [ctx.int32_ty(0), ctx.int32_ty(0)])
- b.store(b.load(ip_in), data_ptr)
- return b
+ def _get_input_data_ptr(b, i):
+ ptr = builder.gep(mech_input, [ctx.int32_ty(0), ctx.int32_ty(i)])
+ return b, ptr
builder = self._gen_llvm_ports(ctx, builder, self.input_ports, "input_ports",
- _get_output_ptr, _fill_input,
+ _get_output_ptr, _get_input_data_ptr,
mech_params, mech_state, mech_input)
return ip_output, builder
def _gen_llvm_param_ports_for_obj(self, obj, params_in, ctx, builder,
mech_params, mech_state, mech_input):
- # Allocate a shadow structure to overload user supplied parameters
- params_out = builder.alloca(params_in.type.pointee)
- # Copy original values. This handles params without param ports.
- # Few extra copies will be eliminated by the compiler.
- builder.store(builder.load(params_in), params_out)
-
# This should be faster than 'obj._get_compilation_params'
compilation_params = (getattr(obj.parameters, p_id, None) for p_id in obj.llvm_param_ids)
# Filter out param ports without corresponding param for this function
param_ports = [self._parameter_ports[param] for param in compilation_params if param in self._parameter_ports]
+ # Exit early if there's no modulation. It's difficult for the compiler
+ # to replace pointer arguments to functions with the source location.
+ if len(param_ports) == 0:
+ return params_in, builder
+
+ # Allocate a shadow structure to overload user supplied parameters
+ params_out = builder.alloca(params_in.type.pointee, name="modulated_parameters")
+ if len(param_ports) != len(obj.llvm_param_ids):
+ builder = pnlvm.helpers.memcpy(builder, params_out, params_in)
+
def _get_output_ptr(b, i):
ptr = pnlvm.helpers.get_param_ptr(b, obj, params_out,
param_ports[i].source.name)
return b, ptr
- def _fill_input(b, p_input, i):
- param_ptr = pnlvm.helpers.get_param_ptr(b, obj, params_in,
- param_ports[i].source.name)
- # Parameter port inputs are {original parameter, [modulations]},
- # here we fill in the first one.
- data_ptr = builder.gep(p_input, [ctx.int32_ty(0), ctx.int32_ty(0)])
- assert data_ptr.type == param_ptr.type, \
- "Mishandled modulation type for: {} in '{}' in '{}'".format(
- param_ports[i].name, obj.name, self.name)
- b.store(b.load(param_ptr), data_ptr)
- return b
+ def _get_input_data_ptr(b, i):
+ ptr = pnlvm.helpers.get_param_ptr(b, obj, params_in,
+ param_ports[i].source.name)
+ return b, ptr
builder = self._gen_llvm_ports(ctx, builder, param_ports, "_parameter_ports",
- _get_output_ptr, _fill_input,
+ _get_output_ptr, _get_input_data_ptr,
mech_params, mech_state, mech_input)
return params_out, builder
@@ -2963,17 +3019,41 @@ def _gen_llvm_output_port_parse_variable(self, ctx, builder,
port_spec = port._variable_spec
if port_spec == OWNER_VALUE:
return value
- elif isinstance(port_spec, tuple) and port_spec[0] == OWNER_VALUE:
- index = port_spec[1]() if callable(port_spec[1]) else port_spec[1]
-
- assert index < len(value.type.pointee)
- return builder.gep(value, [ctx.int32_ty(0), ctx.int32_ty(index)])
elif port_spec == OWNER_EXECUTION_COUNT:
- execution_count = pnlvm.helpers.get_state_ptr(builder, self, mech_state, "execution_count")
- return execution_count
+ # Convert execution count to (num_executions, TimeScale.LIFE)
+ # The difference in Python PNL is that the former counts across
+ # all contexts. This is not possible in compiled code, thus
+ # the two are identical.
+ port_spec = ("num_executions", TimeScale.LIFE)
+
+ try:
+ name = port_spec[0]
+ ids = (x() if callable(x) else getattr(x, 'value', x) for x in port_spec[1:])
+ except TypeError as e:
+ # TypeError means we can't index.
+ # Convert this to assertion failure below
+ pass
else:
#TODO: support more spec options
- assert False, "Unsupported OutputPort spec: {} ({})".format(port_spec, value.type)
+ if name == OWNER_VALUE:
+ data = value
+ elif name in self.llvm_state_ids:
+ data = pnlvm.helpers.get_state_ptr(builder, self, mech_state, name)
+ else:
+ data = None
+
+ if data is not None:
+ parsed = builder.gep(data, [ctx.int32_ty(0), *(ctx.int32_ty(i) for i in ids)])
+ # "num_executions" are kept as int64, we need to convert the value to float first
+ if name == "num_executions":
+ count = builder.load(parsed)
+ count_fp = builder.uitofp(count, ctx.float_ty)
+ parsed = builder.alloca(count_fp.type)
+ builder.store(count_fp, parsed)
+
+ return parsed
+
+ assert False, "Unsupported OutputPort spec: {} ({})".format(port_spec, value.type)
def _gen_llvm_output_ports(self, ctx, builder, value,
mech_params, mech_state, mech_in, mech_out):
@@ -2981,88 +3061,75 @@ def _get_output_ptr(b, i):
ptr = b.gep(mech_out, [ctx.int32_ty(0), ctx.int32_ty(i)])
return b, ptr
- def _fill_input(b, s_input, i):
- data_ptr = self._gen_llvm_output_port_parse_variable(ctx, b,
+ def _get_input_data_ptr(b, i):
+ ptr = self._gen_llvm_output_port_parse_variable(ctx, b,
mech_params, mech_state, value, self.output_ports[i])
- # Output port inputs are {original parameter, [modulations]},
- # fill in the first one.
- input_ptr = builder.gep(s_input, [ctx.int32_ty(0), ctx.int32_ty(0)])
- if input_ptr.type != data_ptr.type:
- port = self.output_ports[i]
- warnings.warn("Shape mismatch: {} parsed value does not match "
- "output port: mech value: {} spec: {} parsed {}.".format(
- port, self.defaults.value, port._variable_spec,
- port.defaults.variable))
- input_ptr = builder.gep(input_ptr, [ctx.int32_ty(0), ctx.int32_ty(0)])
- b.store(b.load(data_ptr), input_ptr)
- return b
+ return b, ptr
builder = self._gen_llvm_ports(ctx, builder, self.output_ports, "output_ports",
- _get_output_ptr, _fill_input,
+ _get_output_ptr, _get_input_data_ptr,
mech_params, mech_state, mech_in)
return builder
- def _gen_llvm_invoke_function(self, ctx, builder, function, params, state, variable, *, tags:frozenset):
+ def _gen_llvm_invoke_function(self, ctx, builder, function, f_params, f_state, variable, *, tags:frozenset):
fun = ctx.import_llvm_function(function, tags=tags)
- fun_out = builder.alloca(fun.args[3].type.pointee)
+ fun_out = builder.alloca(fun.args[3].type.pointee, name=function.name + "_output")
- builder.call(fun, [params, state, variable, fun_out])
+ builder.call(fun, [f_params, f_state, variable, fun_out])
return fun_out, builder
- def _gen_llvm_is_finished_cond(self, ctx, builder, params, state):
+ def _gen_llvm_is_finished_cond(self, ctx, builder, m_params, m_state):
return ctx.bool_ty(1)
- def _gen_llvm_mechanism_functions(self, ctx, builder, params, state, arg_in,
+ def _gen_llvm_mechanism_functions(self, ctx, builder, m_base_params, m_params, m_state, arg_in,
ip_output, *, tags:frozenset):
# Default mechanism runs only the main function
- f_params_ptr = pnlvm.helpers.get_param_ptr(builder, self, params, "function")
+ f_base_params = pnlvm.helpers.get_param_ptr(builder, self, m_base_params, "function")
f_params, builder = self._gen_llvm_param_ports_for_obj(
- self.function, f_params_ptr, ctx, builder, params, state, arg_in)
- f_state = pnlvm.helpers.get_state_ptr(builder, self, state, "function")
+ self.function, f_base_params, ctx, builder, m_base_params, m_state, arg_in)
+ f_state = pnlvm.helpers.get_state_ptr(builder, self, m_state, "function")
return self._gen_llvm_invoke_function(ctx, builder, self.function,
f_params, f_state, ip_output,
tags=tags)
- def _gen_llvm_function_internal(self, ctx, builder, params, state, arg_in,
- arg_out, *, tags:frozenset):
+ def _gen_llvm_function_internal(self, ctx, builder, m_params, m_state, arg_in,
+ arg_out, m_base_params, *, tags:frozenset):
ip_output, builder = self._gen_llvm_input_ports(ctx, builder,
- params, state, arg_in)
-
- value, builder = self._gen_llvm_mechanism_functions(ctx, builder, params,
- state, arg_in,
- ip_output,
- tags=tags)
-
- # Update execution counter
- exec_count_ptr = pnlvm.helpers.get_state_ptr(builder, self, state, "execution_count")
- exec_count = builder.load(exec_count_ptr)
- exec_count = builder.fadd(exec_count, exec_count.type(1))
- builder.store(exec_count, exec_count_ptr)
-
- # Update internal clock (i.e. num_executions parameter)
- num_executions_ptr = pnlvm.helpers.get_state_ptr(builder, self, state, "num_executions")
- for scale in [TimeScale.TIME_STEP, TimeScale.PASS, TimeScale.TRIAL, TimeScale.RUN]:
- num_exec_time_ptr = builder.gep(num_executions_ptr, [ctx.int32_ty(0), ctx.int32_ty(scale.value)])
+ m_base_params, m_state, arg_in)
+
+ value, builder = self._gen_llvm_mechanism_functions(ctx, builder, m_base_params,
+ m_params, m_state, arg_in,
+ ip_output, tags=tags)
+
+
+ # Update num_executions parameter
+ num_executions_ptr = pnlvm.helpers.get_state_ptr(builder, self, m_state, "num_executions")
+ for scale in TimeScale:
+ assert scale.value < len(num_executions_ptr.type.pointee)
+ num_exec_time_ptr = builder.gep(num_executions_ptr,
+ [ctx.int32_ty(0), ctx.int32_ty(scale.value)],
+ name="num_executions_{}_ptr".format(scale))
new_val = builder.load(num_exec_time_ptr)
new_val = builder.add(new_val, new_val.type(1))
builder.store(new_val, num_exec_time_ptr)
- builder = self._gen_llvm_output_ports(ctx, builder, value, params, state, arg_in, arg_out)
-
- val_ptr = pnlvm.helpers.get_state_ptr(builder, self, state, "value")
+ val_ptr = pnlvm.helpers.get_state_ptr(builder, self, m_state, "value")
if val_ptr.type.pointee == value.type.pointee:
- pnlvm.helpers.push_state_val(builder, self, state, "value", value)
+ pnlvm.helpers.push_state_val(builder, self, m_state, "value", value)
else:
# FIXME: Does this need some sort of parsing?
warnings.warn("Shape mismatch: function result does not match mechanism value param: {} vs. {}".format(value.type.pointee, val_ptr.type.pointee))
+ # Run output ports after updating the mech state (num_executions and value)
+ builder = self._gen_llvm_output_ports(ctx, builder, value, m_base_params, m_state, arg_in, arg_out)
+
# is_finished should be checked after output ports ran
is_finished_f = ctx.import_llvm_function(self, tags=tags.union({"is_finished"}))
- is_finished_cond = builder.call(is_finished_f, [params, state, arg_in,
+ is_finished_cond = builder.call(is_finished_f, [m_params, m_state, arg_in,
arg_out])
return builder, is_finished_cond
@@ -3071,8 +3138,8 @@ def _gen_llvm_function_reset(self, ctx, builder, params, state, arg_in, arg_out,
reinit_func = ctx.import_llvm_function(self.function, tags=tags)
reinit_params = pnlvm.helpers.get_param_ptr(builder, self, params, "function")
reinit_state = pnlvm.helpers.get_state_ptr(builder, self, state, "function")
- reinit_in = builder.alloca(reinit_func.args[2].type.pointee)
- reinit_out = builder.alloca(reinit_func.args[3].type.pointee)
+ reinit_in = builder.alloca(reinit_func.args[2].type.pointee, name="reinit_in")
+ reinit_out = builder.alloca(reinit_func.args[3].type.pointee, name="reinit_out")
builder.call(reinit_func, [reinit_params, reinit_state, reinit_in,
reinit_out])
@@ -3096,11 +3163,11 @@ def _gen_llvm_function(self, *, extra_args=[], ctx:pnlvm.LLVMBuilderContext, tag
builder.ret(finished)
return builder.function
- def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out, *, tags:frozenset):
+ def _gen_llvm_function_body(self, ctx, builder, base_params, state, arg_in, arg_out, *, tags:frozenset):
assert "reset" not in tags
params, builder = self._gen_llvm_param_ports_for_obj(
- self, params, ctx, builder, params, state, arg_in)
+ self, base_params, ctx, builder, base_params, state, arg_in)
is_finished_flag_ptr = pnlvm.helpers.get_state_ptr(builder, self, state,
"is_finished_flag")
@@ -3126,17 +3193,19 @@ def _gen_llvm_function_body(self, ctx, builder, params, state, arg_in, arg_out,
# Get internal function
args_t = [a.type for a in builder.function.args]
+ args_t[4:4] = [base_params.type]
internal_builder = ctx.create_llvm_function(args_t, self,
name=builder.function.name + "_internal",
return_type=ctx.bool_ty)
- iparams, istate, iin, iout = internal_builder.function.args[:4]
+ iparams, istate, iin, iout, ibase_params = internal_builder.function.args[:5]
internal_builder, is_finished = self._gen_llvm_function_internal(ctx, internal_builder,
- iparams, istate, iin, iout, tags=tags)
+ iparams, istate, iin, iout,
+ ibase_params, tags=tags)
internal_builder.ret(is_finished)
# Call Internal Function
internal_f = internal_builder.function
- is_finished_cond = builder.call(internal_f, [params, state, arg_in, arg_out, *builder.function.args[4:]])
+ is_finished_cond = builder.call(internal_f, [params, state, arg_in, arg_out, base_params, *builder.function.args[4:]])
#FIXME: Flag and count should be int instead of float
# Check if we reached maximum iteration count
@@ -3393,7 +3462,7 @@ def port_cell(port, include_function:bool=False, include_value:bool=False, use_l
value=''
if include_value:
if use_label and not isinstance(port, ParameterPort):
- value = f'<br/>={port.label}'
+ value = f'<br/>={port.labeled_value}'
else:
value = f'<br/>={port.value}'
return f'{port.name}{function}{value} | '
@@ -3824,6 +3893,17 @@ def _get_port_value_labels(self, port_type, context=None):
def input_port(self):
return self.input_ports[0]
+ def get_input_variables(self, context=None):
+ # FIX: 2/4/22 THIS WOULD PARALLEL get_input_values BUT MAY NOT BE NEEDED:
+ # input_variables = []
+ # for input_port in self.input_ports:
+ # if "LearningSignal" in input_port.name:
+ # input_variables.append(input_port.parameters.variable.get(context).flatten())
+ # else:
+ # input_variables.append(input_port.parameters.variable.get(context))
+ # return input_variables
+ return [input_port.parameters.variable.get(context) for input_port in self.input_ports]
+
@property
def input_values(self):
try:
@@ -3847,6 +3927,51 @@ def external_input_ports(self):
except (TypeError, AttributeError):
return None
+ @property
+ def external_input_shape(self):
+ """Alias for _default_external_input_shape"""
+ return self._default_external_input_shape
+
+ @property
+ def _default_external_input_shape(self):
+ try:
+ shape = []
+ for input_port in self.input_ports:
+ if input_port.internal_only or input_port.default_input:
+ continue
+ if input_port._input_shape_template == VARIABLE:
+ shape.append(input_port.defaults.variable)
+ elif input_port._input_shape_template == VALUE:
+ shape.append(input_port.defaults.value)
+ else:
+ assert False, f"PROGRAM ERROR: bad changes_shape in attempt to assign " \
+ f"default_external_input_shape for '{input_port.name}' of '{self.name}."
+ return shape
+ except (TypeError, AttributeError):
+ return None
+
+ @property
+ def external_input_variables(self):
+ """Returns variables of all external InputPorts that belong to the Mechanism"""
+ try:
+ return [input_port.variable for input_port in self.input_ports if not input_port.internal_only]
+ except (TypeError, AttributeError):
+ return None
+
+ @property
+ def default_external_inputs(self):
+ try:
+ return [input_port.default_input for input_port in self.input_ports if not input_port.internal_only]
+ except (TypeError, AttributeError):
+ return None
+
+ @property
+ def default_external_input_variables(self):
+ try:
+ return [input_port.defaults.variable for input_port in self.input_ports if not input_port.internal_only]
+ except (TypeError, AttributeError):
+ return None
+
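A brief, hypothetical sketch of how the external-input introspection properties added above might be queried; the mechanism type and size are illustrative, and exact shapes depend on the InputPorts' defaults.

import psyneulink as pnl

# Hypothetical example; names and sizes are illustrative.
m = pnl.ProcessingMechanism(size=2)
print(m.external_input_shape)              # e.g., [array([0., 0.])]
print(m.external_input_variables)          # variables of non-internal InputPorts
print(m.default_external_input_variables)  # their default variables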
@property
def external_input_values(self):
try:
@@ -3862,7 +3987,7 @@ def default_external_input_values(self):
return None
@property
- def input_labels(self):
+ def labeled_input_values(self):
"""
Returns a list with as many items as there are InputPorts of the Mechanism. Each list item represents the value
of the corresponding InputPort, and is populated by a string label (from the input_labels_dict) when one
@@ -3886,13 +4011,13 @@ def output_port(self):
@property
def output_values(self):
- return self.output_ports.values
+ return self.get_output_values()
def get_output_values(self, context=None):
return [output_port.parameters.value.get(context) for output_port in self.output_ports]
@property
- def output_labels(self):
+ def labeled_output_values(self):
"""
Returns a list with as many items as there are OutputPorts of the Mechanism. Each list item represents the
value of the corresponding OutputPort, and is populated by a string label (from the output_labels_dict) when
@@ -3906,8 +4031,8 @@ def get_output_labels(self, context=None):
elif context:
return self.get_output_values(context)
else:
- return self.output_values
-
+ # Use this to report most recent value if no context is available
+ return self.output_ports.values
@property
def ports(self):
@@ -3994,28 +4119,87 @@ def _dependent_components(self):
self.parameter_ports,
))
- @property
- def _dict_summary(self):
- inputs_dict = {
- MODEL_SPEC_ID_INPUT_PORTS: [
- s._dict_summary for s in self.input_ports
- ]
- }
- inputs_dict[MODEL_SPEC_ID_INPUT_PORTS].extend(
- [s._dict_summary for s in self.parameter_ports]
+ def as_mdf_model(self):
+ import modeci_mdf.mdf as mdf
+
+ model = mdf.Node(
+ id=parse_valid_identifier(self.name),
+ **self._mdf_metadata,
)
- outputs_dict = {
- MODEL_SPEC_ID_OUTPUT_PORTS: [
- s._dict_summary for s in self.output_ports
- ]
- }
+ for name, val in self._mdf_model_parameters[self._model_spec_id_parameters].items():
+ model.parameters.append(mdf.Parameter(id=name, value=val))
- return {
- **super()._dict_summary,
- **inputs_dict,
- **outputs_dict
- }
+ for ip in self.input_ports:
+ if len(ip.path_afferents) > 1:
+ for aff in ip.path_afferents:
+ ip_model = mdf.InputPort(
+ id=parse_valid_identifier(f'{self.name}_input_port_{aff.name}'),
+ shape=str(aff.defaults.value.shape),
+ type=str(aff.defaults.value.dtype)
+ )
+ model.input_ports.append(ip_model)
+
+ # create combination function
+ model.parameters.append(
+ mdf.Parameter(
+ id='combination_function_input_data',
+ value=f"[{', '.join(f'{mip.id}' for mip in model.input_ports)}]"
+ )
+ )
+ combination_function_id = f'{parse_valid_identifier(self.name)}_{MODEL_SPEC_ID_INPUT_PORT_COMBINATION_FUNCTION}'
+ combination_function_args = {
+ 'data': "combination_function_input_data",
+ 'axes': 0
+ }
+ model.functions.append(
+ mdf.Function(
+ id=combination_function_id,
+ function={'onnx::ReduceSum': combination_function_args},
+ args=combination_function_args
+ )
+ )
+ combination_function_dimreduce_id = f'{combination_function_id}_dimreduce'
+ model.functions.append(
+ mdf.Function(
+ id=combination_function_dimreduce_id,
+ value=f'{MODEL_SPEC_ID_MDF_VARIABLE}[0][0]',
+ args={
+ MODEL_SPEC_ID_MDF_VARIABLE: combination_function_id,
+ }
+ )
+ )
+ else:
+ ip_model = ip.as_mdf_model()
+ ip_model.id = f'{parse_valid_identifier(self.name)}_{ip_model.id}'
+
+ model.input_ports.append(ip_model)
+
+ for op in self.output_ports:
+ op_model = op.as_mdf_model()
+ op_model.id = f'{parse_valid_identifier(self.name)}_{op_model.id}'
+
+ model.output_ports.append(op_model)
+
+ function_model = self.function.as_mdf_model()
+
+ for _, func_param in function_model.metadata['function_stateful_params'].items():
+ model.parameters.append(mdf.Parameter(**func_param))
+
+ if len(ip.path_afferents) > 1:
+ primary_function_input_name = combination_function_dimreduce_id
+ else:
+ primary_function_input_name = model.input_ports[0].id
+
+ self.function._set_mdf_arg(
+ function_model, _get_variable_parameter_name(self.function), primary_function_input_name
+ )
+ model.functions.append(function_model)
+
+ for func_model in model.functions:
+ _substitute_expression_args(func_model)
+
+ return model
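As a hedged usage sketch, as_mdf_model can be called on a Mechanism to obtain a modeci_mdf.mdf.Node; this assumes the optional modeci_mdf package is installed, and the mechanism name is illustrative.

import psyneulink as pnl

# Hypothetical example; requires the optional modeci_mdf package.
mech = pnl.ProcessingMechanism(name='A')
node = mech.as_mdf_model()                     # modeci_mdf.mdf.Node
print(node.id, [ip.id for ip in node.input_ports])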
def _is_mechanism_spec(spec):
diff --git a/psyneulink/core/components/mechanisms/modulatory/control/controlmechanism.py b/psyneulink/core/components/mechanisms/modulatory/control/controlmechanism.py
index d3c8b25f947..3da410a7ae5 100644
--- a/psyneulink/core/components/mechanisms/modulatory/control/controlmechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/control/controlmechanism.py
@@ -72,7 +72,7 @@
ControlMechanism can be assigned as the `controller ` for a Composition by specifying it in
the **controller** argument of the Composition's constructor, or by using the Composition's `add_controller
` method. A Composition's `controller ` and its associated
-Components can be displayed using the Composition's `show_graph ` method with its
+Components can be displayed using the Composition's `show_graph ` method with its
**show_control** argument assigned as `True`.
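For example, a minimal sketch of both ways of assigning a controller; the mechanism, the controlled parameter, and all names are illustrative.

import psyneulink as pnl

# Hypothetical example; names and the controlled parameter are illustrative.
mech = pnl.ProcessingMechanism(name='task')
ctl = pnl.ControlMechanism(control=[('slope', mech)])
comp = pnl.Composition(pathways=[mech], controller=ctl)
# or, equivalently:
# comp = pnl.Composition(pathways=[mech]); comp.add_controller(ctl)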
@@ -92,7 +92,7 @@
*Specifying OutputPorts to be monitored*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-A ControlMechanism can be configured to monitor the output of other Mechanisms directly (by receiving direct
+A ControlMechanism can be configured to monitor the output of other Mechanisms either directly (by receiving direct
Projections from their OutputPorts), or by way of an `ObjectiveMechanism` that evaluates those outputs and passes the
result to the ControlMechanism (see `below ` for more detailed description).
The following figures show an example of each:
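A minimal, hypothetical sketch of the two configurations, constructed standalone (names are illustrative):

import psyneulink as pnl

a = pnl.ProcessingMechanism(name='A')   # hypothetical Mechanism to be monitored

# 1) Monitor directly, via monitor_for_control:
ctl_direct = pnl.ControlMechanism(monitor_for_control=[a])

# 2) Monitor by way of an ObjectiveMechanism:
ctl_objective = pnl.ControlMechanism(
    objective_mechanism=pnl.ObjectiveMechanism(monitor=[a]))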
@@ -1029,6 +1029,7 @@ class ControlMechanism(ModulatoryMechanism_Base):
"""
componentType = "ControlMechanism"
+ controlType = CONTROL # Used as key in specification dictionaries; can be overridden by subclasses
initMethod = INIT_EXECUTE_METHOD_ONLY
@@ -1145,6 +1146,12 @@ class Parameters(ModulatoryMechanism_Base.Parameters):
:type:
:read only: True
+ outcome_input_ports
+ see `outcome_input_ports `
+
+ :default value: None
+ :type: ``list``
+
output_ports
see `output_ports `
@@ -1182,6 +1189,7 @@ class Parameters(ModulatoryMechanism_Base.Parameters):
objective_mechanism = Parameter(None, stateful=False, loggable=False, structural=True)
outcome_input_ports_option = Parameter(SEPARATE, stateful=False, loggable=False, structural=True)
+ outcome_input_ports = Parameter(None, reference=True, stateful=False, loggable=False, read_only=True)
input_ports = Parameter(
[OUTCOME],
@@ -1197,6 +1205,7 @@ class Parameters(ModulatoryMechanism_Base.Parameters):
stateful=False,
loggable=False,
read_only=True,
+ structural=True,
)
output_ports = Parameter(
@@ -1206,8 +1215,8 @@ class Parameters(ModulatoryMechanism_Base.Parameters):
read_only=True,
structural=True,
parse_spec=True,
- aliases=['control', 'control_signals'],
- constructor_argument='control'
+ aliases=[CONTROL, CONTROL_SIGNALS],
+ constructor_argument=CONTROL
)
def _parse_output_ports(self, output_ports):
@@ -1231,17 +1240,14 @@ def is_2tuple(o):
MECHANISM: output_ports[i][1]
}
# handle dict of form {PROJECTIONS: <2 item tuple>, : , ...}
- elif (
- isinstance(output_ports[i], dict)
- and PROJECTIONS in output_ports[i]
- and is_2tuple(output_ports[i][PROJECTIONS])
- ):
- full_spec_dict = {
- NAME: output_ports[i][PROJECTIONS][0],
- MECHANISM: output_ports[i][PROJECTIONS][1],
- **{k: v for k, v in output_ports[i].items() if k != PROJECTIONS}
- }
- output_ports[i] = full_spec_dict
+ elif isinstance(output_ports[i], dict):
+ for PROJ_SPEC_KEYWORD in {PROJECTIONS, self._owner.controlType}:
+ if (PROJ_SPEC_KEYWORD in output_ports[i] and is_2tuple(output_ports[i][PROJ_SPEC_KEYWORD])):
+ tuple_spec = output_ports[i].pop(PROJ_SPEC_KEYWORD)
+ output_ports[i].update({
+ NAME: tuple_spec[0],
+ MECHANISM: tuple_spec[1]})
+ assert True
return output_ports
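For reference, hedged examples of the specification forms this parser accepts; the parameter and mechanism names are illustrative, not from this patch.

import psyneulink as pnl
from psyneulink.core.globals.keywords import PROJECTIONS

# Hypothetical examples; 'slope' and 'my_mech' are illustrative.
mech = pnl.ProcessingMechanism(name='my_mech')

# 2-item tuple: (parameter name, Mechanism)
ctl_a = pnl.ControlMechanism(control=[('slope', mech)])

# dict with a PROJECTIONS (or controlType) entry holding the same 2-item tuple
ctl_b = pnl.ControlMechanism(control=[{PROJECTIONS: ('slope', mech)}])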
@@ -1282,9 +1288,10 @@ def __init__(self,
**kwargs
):
- monitor_for_control = convert_to_list(monitor_for_control) or []
control = convert_to_list(control) or []
+ monitor_for_control = convert_to_list(monitor_for_control) or []
self.allow_probes = allow_probes
+ self._sim_counts = {}
# For backward compatibility:
if kwargs:
@@ -1296,7 +1303,7 @@ def __init__(self,
# Only allow one of CONTROL, MODULATORY_SIGNALS OR CONTROL_SIGNALS to be specified
# These are synonyms, but allowing several to be specified and trying to combine the specifications
# can cause problems if different forms of specification are used to refer to the same Component(s)
- control_specified = "'control'" if control else ''
+ control_specified = f"'{CONTROL}'" if control else ''
modulatory_signals_specified = ''
if MODULATORY_SIGNALS in kwargs:
args = kwargs.pop(MODULATORY_SIGNALS)
@@ -1324,8 +1331,6 @@ def __init__(self,
function = function or DefaultAllocationFunction
- self._sim_counts = {}
-
super(ControlMechanism, self).__init__(
default_variable=default_variable,
size=size,
@@ -1356,9 +1361,17 @@ def _validate_params(self, request_set, target_set=None, context=None):
target_set=target_set,
context=context)
- if OBJECTIVE_MECHANISM in target_set and \
- target_set[OBJECTIVE_MECHANISM] is not None and\
- target_set[OBJECTIVE_MECHANISM] is not False:
+ if (MONITOR_FOR_CONTROL in target_set
+ and target_set[MONITOR_FOR_CONTROL] is not None
+ and any(item for item in target_set[MONITOR_FOR_CONTROL]
+ if (isinstance(item, ObjectiveMechanism) or item is ObjectiveMechanism))):
+ raise ControlMechanismError(f"The '{MONITOR_FOR_CONTROL}' arg of '{self.name}' contains a specification for"
+ f" an {ObjectiveMechanism.componentType} ({target_set[MONITOR_FOR_CONTROL]}). "
+ f"This should be specified in its '{OBJECTIVE_MECHANISM}' argument.")
+
+ if (OBJECTIVE_MECHANISM in target_set and
+ target_set[OBJECTIVE_MECHANISM] is not None
+ and target_set[OBJECTIVE_MECHANISM] is not False):
if isinstance(target_set[OBJECTIVE_MECHANISM], list):
@@ -1385,22 +1398,25 @@ def _validate_params(self, request_set, target_set=None, context=None):
validate_monitored_port_spec(self, obj_mech_spec_list)
if not isinstance(target_set[OBJECTIVE_MECHANISM], (ObjectiveMechanism, list, bool)):
- raise ControlMechanismError("Specification of {} arg for {} ({}) must be an {}"
- "or a list of Mechanisms and/or OutputPorts to be monitored for control".
- format(OBJECTIVE_MECHANISM,
- self.name, target_set[OBJECTIVE_MECHANISM],
- ObjectiveMechanism.componentName))
+ raise ControlMechanismError(f"Specification of {OBJECTIVE_MECHANISM} arg for '{self.name}' "
+ f"({target_set[OBJECTIVE_MECHANISM].name}) must be an "
+ f"{ObjectiveMechanism.componentType} or a list of Mechanisms and/or "
+ f"OutputPorts to be monitored for control.")
if CONTROL in target_set and target_set[CONTROL]:
control = target_set[CONTROL]
- assert isinstance(control, list), \
- f"PROGRAM ERROR: control arg {control} of {self.name} should have been converted to a list."
- for ctl_spec in control:
- ctl_spec = _parse_port_spec(port_type=ControlSignal, owner=self, port_spec=ctl_spec)
- if not (isinstance(ctl_spec, ControlSignal)
- or (isinstance(ctl_spec, dict) and ctl_spec[PORT_TYPE]==ControlSignal.__name__)):
- raise ControlMechanismError(f"Invalid specification for '{CONTROL}' argument of {self.name}:"
- f"({ctl_spec})")
+ self._validate_control_arg(control)
+
+ def _validate_control_arg(self, control):
+ """Treat control arg separately so it can be overridden by subclassses (e.g., GatingMechanism)"""
+ assert isinstance(control, list), \
+ f"PROGRAM ERROR: control arg {control} of {self.name} should have been converted to a list."
+ for ctl_spec in control:
+ ctl_spec = _parse_port_spec(port_type=ControlSignal, owner=self, port_spec=ctl_spec)
+ if not (isinstance(ctl_spec, ControlSignal)
+ or (isinstance(ctl_spec, dict) and ctl_spec[PORT_TYPE] == ControlSignal)):
+ raise ControlMechanismError(f"Invalid specification for '{CONTROL}' argument of {self.name}:"
+ f"({ctl_spec})")
# IMPLEMENTATION NOTE: THIS SHOULD BE MOVED TO COMPOSITION ONCE THAT IS IMPLEMENTED
def _instantiate_objective_mechanism(self, input_ports=None, context=None):
@@ -1542,7 +1558,8 @@ def _instantiate_input_ports(self, input_ports=None, context=None):
other_input_ports = input_ports or []
# FIX 11/3/21: THIS SHOULD BE MADE A PARAMETER
- self.outcome_input_ports = ContentAddressableList(component_type=OutputPort)
+ self.parameters.outcome_input_ports.set(ContentAddressableList(component_type=OutputPort),
+ override=True)
# If ObjectiveMechanism is specified, instantiate it and OUTCOME InputPort that receives projection from it
if self.objective_mechanism:
@@ -1551,6 +1568,7 @@ def _instantiate_input_ports(self, input_ports=None, context=None):
# of the objective_mechanism's constructor
self._instantiate_objective_mechanism(input_ports, context=context)
+ # FIX: CONSOLIDATE THIS WITH SIMILAR HANDLING IN _instantiate_objective_mechanism AND ELSE BELOW
# If no ObjectiveMechanism is specified, but items to monitor are specified,
# assign an outcome_input_port for each item specified
elif self.monitor_for_control:
@@ -1579,7 +1597,6 @@ def _instantiate_input_ports(self, input_ports=None, context=None):
from psyneulink.core.components.projections.pathway.mappingprojection import MappingProjection
from psyneulink.core.components.mechanisms.processing.objectivemechanism import _parse_monitor_specs
- self.aux_components = []
for i in range(len(projection_specs)):
if option == SEPARATE:
# Each outcome_input_port get its own Projection
@@ -1590,10 +1607,21 @@ def _instantiate_input_ports(self, input_ports=None, context=None):
self.aux_components.append(MappingProjection(sender=projection_specs[i],
receiver=self.outcome_input_ports[outcome_port_index]))
- # Nothing has been specified, so just instantiate the default OUTCOME InputPort
+ # Nothing has been specified, so just instantiate the default OUTCOME InputPort with any input_ports passed in
else:
- super()._instantiate_input_ports(context=context)
+ # # MODIFIED 1/30/21 OLD:
+ # super()._instantiate_input_ports(context=context)
+ # self.outcome_input_ports.append(self.input_ports[OUTCOME])
+ # MODIFIED 1/30/21 NEW:
+ other_input_port_value_sizes = self._handle_arg_input_ports(other_input_ports)[0]
+ # Construct full list of InputPort specifications and sizes
+ input_ports = self.input_ports + other_input_ports
+ input_port_value_sizes = [[0]] + other_input_port_value_sizes
+ super()._instantiate_input_ports(context=context,
+ input_ports=input_ports,
+ reference_value=input_port_value_sizes)
self.outcome_input_ports.append(self.input_ports[OUTCOME])
+ # MODIFIED 1/30/21 END
def _parse_monitor_for_control_input_ports(self, context):
"""Get outcome_input_port specification dictionaries for items specified in monitor_for_control.
@@ -1654,7 +1682,9 @@ def _parse_monitor_for_control_input_ports(self, context):
return outcome_input_port_specs, port_value_sizes, monitored_ports
def _validate_monitor_for_control(self, nodes):
- # Ensure all of the Components being monitored for control are in the Composition being controlled
+ """Ensure all of the Components being monitored for control are in the Composition being controlled
+ If monitor_for_control is specified as an ObjectiveMechanism, warn and move it to the objective_mechanism arg
+ """
from psyneulink.core.components.ports.port import Port
invalid_outcome_specs = [item for item in self.monitor_for_control
if ((isinstance(item, Mechanism)
@@ -1696,7 +1726,7 @@ def _register_control_signal_type(self, context=None):
)
def _instantiate_control_signals(self, context):
- """Subclassess can override for class-specific implementation (see OptimizationControlMechanism for example)"""
+ """Subclasses can override for class-specific implementation (see OptimizationControlMechanism for example)"""
output_port_specs = list(enumerate(self.output_ports))
for i, control_signal in output_port_specs:
@@ -1983,6 +2013,23 @@ def _activate_projections_for_compositions(self, composition=None):
for proj in deeply_nested_aux_components.values():
composition.add_projection(proj, sender=proj.sender, receiver=proj.receiver)
+ # Add any remaining afferent Projections that have been assigned and are from nodes in composition
+ remaining_projections = set(self.projections) - dependent_projections - set(self.composition.projections)
+ for proj in remaining_projections:
+ # Projection is afferent:
+ if proj in self.afferents:
+ # Confirm sender is in composition
+ port, node, comp = composition._get_source(proj)
+ elif proj in self.efferents:
+ # Confirm receiver is in composition
+ port, node, comp = composition._get_destination(proj)
+ else:
+ assert False, f"PROGRAM ERROR: Attempt to activate Projection ('{proj.name}') in '{composition.name}'" \
+ f" associated with its controller '{self.name}' that is neither an afferent nor " \
+ f"efferent of '{self.name}' -- May be as yet unaccounted for condition."
+ if node in composition._get_all_nodes():
+ proj._activate_for_compositions(composition)
+
def _apply_control_allocation(self, control_allocation, runtime_params, context):
"""Update values to `control_signals `
based on specified `control_allocation `
diff --git a/psyneulink/core/components/mechanisms/modulatory/control/gating/gatingmechanism.py b/psyneulink/core/components/mechanisms/modulatory/control/gating/gatingmechanism.py
index dbf03790b30..5338d545fc4 100644
--- a/psyneulink/core/components/mechanisms/modulatory/control/gating/gatingmechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/control/gating/gatingmechanism.py
@@ -185,10 +185,11 @@
from psyneulink.core.components.mechanisms.modulatory.control.controlmechanism import ControlMechanism
from psyneulink.core.components.ports.modulatorysignals.gatingsignal import GatingSignal
+from psyneulink.core.components.ports.port import _parse_port_spec
from psyneulink.core.globals.defaults import defaultGatingAllocation
from psyneulink.core.globals.keywords import \
- GATING, GATING_PROJECTION, GATING_SIGNAL, GATING_SIGNALS, \
- INIT_EXECUTE_METHOD_ONLY, MONITOR_FOR_CONTROL, PROJECTION_TYPE
+ CONTROL, CONTROL_SIGNALS, GATE, GATING_PROJECTION, GATING_SIGNAL, GATING_SIGNALS, \
+ INIT_EXECUTE_METHOD_ONLY, MONITOR_FOR_CONTROL, PORT_TYPE, PROJECTIONS, PROJECTION_TYPE
from psyneulink.core.globals.parameters import Parameter
from psyneulink.core.globals.preferences.basepreferenceset import is_pref_set
from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel
@@ -217,7 +218,7 @@ def _is_gating_spec(spec):
GatingMechanism,
ControlMechanism)):
return True
- elif isinstance(spec, str) and spec in {GATING, GATING_PROJECTION, GATING_SIGNAL}:
+ elif isinstance(spec, str) and spec in {GATE, GATING_PROJECTION, GATING_SIGNAL}:
return True
else:
return False
@@ -372,6 +373,7 @@ class GatingMechanism(ControlMechanism):
"""
componentType = "GatingMechanism"
+ controlType = GATE
initMethod = INIT_EXECUTE_METHOD_ONLY
@@ -399,6 +401,13 @@ class Parameters(ControlMechanism.Parameters):
:default value: numpy.array([0.5])
:type: ``numpy.ndarray``
+ output_ports
+ see `output_ports `
+
+ :default value: None
+ :type:
+ :read only: True
+
gating_allocation
see `gating_allocation `
@@ -413,6 +422,17 @@ class Parameters(ControlMechanism.Parameters):
pnl_internal=True
)
+ output_ports = Parameter(
+ None,
+ stateful=False,
+ loggable=False,
+ read_only=True,
+ structural=True,
+ parse_spec=True,
+ aliases=[CONTROL, CONTROL_SIGNALS, 'gate', 'gating_signal'],
+ constructor_argument='gate'
+ )
+
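A hypothetical sketch of the 'gate' constructor argument aliased above; the gated Mechanism and Port are illustrative.

import psyneulink as pnl

# Hypothetical example; 'target' is illustrative.
target = pnl.ProcessingMechanism(name='target')
gater = pnl.GatingMechanism(gate=[target.input_port])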
@tc.typecheck
def __init__(self,
default_gating_allocation=None,
@@ -462,12 +482,27 @@ def _register_control_signal_type(self, context=None):
registry=self._portRegistry,
)
+ def _validate_control_arg(self, gate):
+ """Overrided to handle GatingMechanism-specific specifications"""
+ assert isinstance(gate, list), \
+ f"PROGRAM ERROR: 'gate' arg ({gate}) of {self.name} should have been converted to a list."
+ for spec in gate:
+ spec = _parse_port_spec(port_type=GatingSignal, owner=self, port_spec=spec)
+ if not (isinstance(spec, GatingSignal)
+ or (isinstance(spec, dict) and spec[PORT_TYPE] == GatingSignal)):
+ raise GatingMechanismError(f"Invalid specification for '{GATE}' argument of {self.name}: ({spec})")
+
def _instantiate_control_signal_type(self, gating_signal_spec, context):
"""Instantiate actual ControlSignal, or subclass if overridden"""
from psyneulink.core.components.ports.port import _instantiate_port
from psyneulink.core.components.projections.projection import ProjectionError
allocation_parameter_default = self.parameters.gating_allocation.default_value
+
+ # Handle controlType as synonym for PROJECTIONS:
+ if isinstance(gating_signal_spec, dict) and self.controlType in gating_signal_spec:
+ gating_signal_spec[PROJECTIONS] = gating_signal_spec.pop(self.controlType)
+
gating_signal = _instantiate_port(port_type=GatingSignal,
owner=self,
variable=self.default_allocation # User specified value
@@ -476,8 +511,10 @@ def _instantiate_control_signal_type(self, gating_signal_spec, context):
modulation=self.defaults.modulation,
port_spec=gating_signal_spec,
context=context)
+
if not type(gating_signal) in convert_to_list(self.outputPortTypes):
raise ProjectionError(f'{type(gating_signal)} inappropriate for {self.name}')
+
return gating_signal
def _check_for_duplicates(self, control_signal, control_signals, context):
diff --git a/psyneulink/core/components/mechanisms/modulatory/control/optimizationcontrolmechanism.py b/psyneulink/core/components/mechanisms/modulatory/control/optimizationcontrolmechanism.py
index 6c15e5b26bf..9b7517f0f60 100644
--- a/psyneulink/core/components/mechanisms/modulatory/control/optimizationcontrolmechanism.py
+++ b/psyneulink/core/components/mechanisms/modulatory/control/optimizationcontrolmechanism.py
@@ -11,7 +11,6 @@
# FIX: REWORK WITH REFERENCES TO `outcome `
# INTRODUCE SIMULATION INTO DISCUSSION OF COMPOSITION-BASED
-
"""
Contents
@@ -20,18 +19,20 @@
* `OptimizationControlMechanism_Overview`
- `Expected Value of Control `
- `Agent Representation and Types of Optimization `
- - `Model-Free" Optimization `
- - `Model-Based" Optimization `
+ - `"Model-Free" Optimization `
+ - `Model-Based Optimization `
* `OptimizationControlMechanism_Creation`
- `Agent Rep `
- `State Features `
- - `State Feature Functions `
+ - `agent_rep Composition `
+ - `agent_rep CompositionFunctionApproximator `
+ - `State Feature Functions `
- `Outcome `
* `OptimizationControlMechanism_Structure`
- `Agent Representation `
- `State `
- `Input `
- - `state_input_ports `
+ - `state_input_ports `
- `outcome_input_ports `
- `objective_mechanism `
- `monitor_for_control `
@@ -42,6 +43,7 @@
- `Output `
- `Randomization ControlSignal `
* `OptimizationControlMechanism_Execution`
+ - `OptimizationControlMechanism_Execution_Timing`
- `OptimizationControlMechanism_Optimization_Procedure`
- `OptimizationControlMechanism_Estimation_Randomization`
* `OptimizationControlMechanism_Class_Reference`
@@ -57,18 +59,19 @@
by using the `OptimizationFunction` (assigned as its `function `) to execute
its `agent_rep ` -- a representation of the Composition to be optimized --
under different `control_allocations `, and selecting the one that optimizes
-its `net_outcome