fix/standard_output_ports_calculate #3114
Merged
21 commits merged on Nov 15, 2024
21 commits
edbc9f3  -  (jdcpni, Oct 27, 2024)
1b26697  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Oct 27, 2024)
dd0602a  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Oct 28, 2024)
1146c28  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Nov 3, 2024)
4081ae1  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Nov 7, 2024)
b68ed21  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Nov 9, 2024)
ea31161  [skip ci]  (jdcpni, Nov 9, 2024)
bcb84c8  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Nov 12, 2024)
55aca0a  [skip ci]  (jdcpni, Nov 13, 2024)
4c83b5d  [skip ci]  (jdcpni, Nov 13, 2024)
2c5af5a  • recurrenttransfermechanism.py  (jdcpni, Nov 14, 2024)
e2032c9  Merge branch 'fix/standard_output_ports_calculate' of https://github.…  (jdcpni, Nov 14, 2024)
52f2029  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Nov 14, 2024)
1679424  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Nov 14, 2024)
ec9caf6  [skip ci]  (jdcpni, Nov 14, 2024)
20ca515  • recurrenttransfermechanism.py  (jdcpni, Nov 14, 2024)
1d88f42  • recurrenttransfermechanism.py  (jdcpni, Nov 15, 2024)
39b2b45  • test_recurrent_transfer_mechanism.py  (jdcpni, Nov 15, 2024)
3f15a6a  • recurrenttransfermechanism.py  (jdcpni, Nov 15, 2024)
62a6c47  -  (jdcpni, Nov 15, 2024)
3630b92  Merge branch 'devel' of https://github.com/PrincetonUniversity/PsyNeu…  (jdcpni, Nov 15, 2024)
psyneulink/core/components/functions/nonstateful/objectivefunctions.py
@@ -143,7 +143,7 @@
length of array for which stability is calculated.

matrix : list, np.ndarray, function keyword, or MappingProjection : default HOLLOW_MATRIX
weight matrix from each element of `variable <Stability.variablity>` to each other; if a matrix other
weight matrix from each element of `variable <Stability.variable>` to each other; if a matrix other
than HOLLOW_MATRIX is assigned, it is convolved with HOLLOW_MATRIX to eliminate self-connections from the
stability calculation.
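
For readers skimming the diff, the effect described here can be sketched in plain NumPy. This is only an illustration of the idea (the elementwise product with a hollow matrix is an assumption about how the convolution is realized, not the library's internal code):

import numpy as np

size = 3
hollow = np.ones((size, size)) - np.identity(size)  # HOLLOW_MATRIX: ones off the diagonal, zeros on it
w = np.full((size, size), 0.5)                       # an arbitrary user-supplied weight matrix
w_no_self = w * hollow                               # zeroes the diagonal, removing self-connections
print(w_no_self)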

@@ -351,8 +351,8 @@

if isinstance(matrix, MappingProjection):
matrix = matrix._parameter_ports[MATRIX]
elif isinstance(matrix, ParameterPort):
pass
# elif isinstance(matrix, ParameterPort):
# pass
Comment on lines +354 to +355
Check notice (Code scanning / CodeQL): Commented-out code. This comment appears to contain commented-out code.
else:
matrix = get_matrix(matrix, size, size)

@@ -364,9 +364,13 @@
self.defaults.variable]

if self.metric == ENTROPY:
self.metric_fct = Distance(default_variable=default_variable, metric=CROSS_ENTROPY, normalize=self.parameters.normalize.default_value)
self.metric_fct = Distance(default_variable=default_variable,
metric=CROSS_ENTROPY,
normalize=self.parameters.normalize.default_value)
elif self.metric in DISTANCE_METRICS._set():
self.metric_fct = Distance(default_variable=default_variable, metric=self.metric, normalize=self.parameters.normalize.default_value)
self.metric_fct = Distance(default_variable=default_variable,
metric=self.metric,
normalize=self.parameters.normalize.default_value)
else:
assert False, "Unknown metric"

@@ -462,6 +466,8 @@
# MODIFIED 6/12/19 END

matrix = self._get_current_parameter_value(MATRIX, context)
if matrix is None:
matrix = self.matrix

current = variable

@@ -538,7 +544,7 @@
length of array for which energy is calculated.

matrix : list, np.ndarray, or matrix keyword
weight matrix from each element of `variable <Energy.variablity>` to each other; if a matrix other
weight matrix from each element of `variable <Energy.variable>` to each other; if a matrix other
than INVERSE_HOLLOW_MATRIX is assigned, it is convolved with HOLLOW_MATRIX to eliminate self-connections from
the energy calculation.
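
As a rough check on what this docstring describes, a NumPy sketch of the energy value is given below. The -0.5 * v @ W @ v form and the shape of INVERSE_HOLLOW_MATRIX for a length-2 variable are assumptions, but they reproduce the reference value asserted in the new test added in this PR:

import numpy as np

v = np.array([0.5124973964842103, 0.5124973964842103])
w = np.array([[0, -1], [-1, 0]])   # assumed INVERSE_HOLLOW_MATRIX for a length-2 variable
energy = -0.5 * v @ w @ v          # assumed energy form; ~0.26265358140309386, the test's reference value
print(energy)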

@@ -566,7 +572,7 @@
default_variable=None,
input_shapes=None,
normalize:bool=None,
# transfer_fct=None,
transfer_fct=None,
matrix=None,
params=None,
owner=None,
@@ -575,20 +581,20 @@
super().__init__(
default_variable=default_variable,
input_shapes=input_shapes,
metric=ENERGY,
matrix=matrix,
# transfer_fct=transfer_fct,
normalize=normalize,
params=params,
owner=owner,
prefs=prefs)
metric=ENERGY,
matrix=matrix,
transfer_fct=transfer_fct,
normalize=normalize,
params=params,
owner=owner,
prefs=prefs)


class Entropy(Stability):
"""
Entropy( \
default_variable=None, \
input_shapes=None, \
input_shapes=None, \
matrix=INVERSE_HOLLOW_MATRIX, \
transfer_fct=None \
normalize=False, \
@@ -648,7 +654,7 @@
length of array for which energy is calculated.

matrix : list, np.ndarray, or matrix keyword
weight matrix from each element of `variable <Entropy.variablity>` to each other; if a matrix other
weight matrix from each element of `variable <Entropy.variable>` to each other; if a matrix other
than INVERSE_HOLLOW_MATRIX is assigned, it is convolved with HOLLOW_MATRIX to eliminate self-connections from
the entropy calculation.
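
Similarly, a NumPy sketch of the entropy value described here is given below. The -sum(v * log(v)) form is an assumption (for the symmetric two-element pattern used in the new test it coincides with the cross-entropy between the pattern and its recurrent input), but it reproduces the test's reference value:

import numpy as np

v = np.array([0.5124973964842103, 0.5124973964842103])
entropy = -np.sum(v * np.log(v))   # assumed cross-entropy form; ~0.6851676585231217, the test's reference value
print(entropy)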

@@ -674,21 +680,24 @@
@check_user_specified
def __init__(self,
default_variable=None,
input_shapes=None,
normalize:bool=None,
matrix=None,
transfer_fct=None,
params=None,
owner=None,
prefs=None):

super().__init__(
default_variable=default_variable,
# matrix=matrix,
metric=ENTROPY,
transfer_fct=transfer_fct,
normalize=normalize,
params=params,
owner=owner,
prefs=prefs)
input_shapes=input_shapes,
metric=ENTROPY,
matrix=matrix,
transfer_fct=transfer_fct,
normalize=normalize,
params=params,
owner=owner,
prefs=prefs)


class Distance(ObjectiveFunction):
recurrenttransfermechanism.py
@@ -193,10 +193,11 @@

from psyneulink.core import llvm as pnlvm
from psyneulink.core.components.component import _get_parametervalue_attr
from psyneulink.core.components.functions.nonstateful.transferfunctions import Linear

Check notice (Code scanning / CodeQL): Cyclic import. Import of module psyneulink.core.components.functions.nonstateful.transferfunctions begins an import cycle.
Check notice (Code scanning / CodeQL): Unused import. Import of 'Linear' is not used.
from psyneulink.core.components.functions.nonstateful.transformfunctions import LinearCombination
from psyneulink.core.components.functions.function import Function, get_matrix
from psyneulink.core.components.functions.nonstateful.learningfunctions import Hebbian
from psyneulink.core.components.functions.nonstateful.objectivefunctions import Stability
from psyneulink.core.components.functions.nonstateful.objectivefunctions import Stability, Energy, Entropy

Check notice (Code scanning / CodeQL): Cyclic import. Import of module psyneulink.core.components.functions.nonstateful.objectivefunctions begins an import cycle.
Check notice (Code scanning / CodeQL): Unused import. Import of 'Stability' is not used.
from psyneulink.core.components.functions.stateful.integratorfunctions import AdaptiveIntegrator
from psyneulink.core.components.functions.userdefinedfunction import UserDefinedFunction
from psyneulink.core.components.mechanisms.mechanism import Mechanism_Base, MechanismError
@@ -210,7 +211,8 @@
from psyneulink.core.components.projections.pathway.mappingprojection import MappingProjection
from psyneulink.core.globals.context import handle_external_context
from psyneulink.core.globals.keywords import \
AUTO, ENERGY, ENTROPY, HETERO, HOLLOW_MATRIX, INPUT_PORT, MATRIX, NAME, RECURRENT_TRANSFER_MECHANISM, RESULT
(AUTO, ENERGY, ENTROPY, FUNCTION, HETERO, HOLLOW_MATRIX, INPUT_PORT,
MATRIX, NAME, RECURRENT_TRANSFER_MECHANISM, RESULT)
from psyneulink.core.globals.parameters import Parameter, SharedParameter, check_user_specified, copy_parameter_value
from psyneulink.core.globals.preferences.basepreferenceset import ValidPrefSet
from psyneulink.core.globals.registry import register_instance, remove_instance_from_registry
@@ -243,7 +245,6 @@
ENTROPY_OUTPUT_PORT_NAME=ENTROPY



class RecurrentTransferError(MechanismError):
pass

@@ -518,13 +519,13 @@

*ENERGY* : float
the energy of the elements in the LCAMechanism's `value <Mechanism_Base.value>`,
calculated using the `Stability` Function using the `ENERGY` metric.
calculated using the `Stability` Function with the `ENERGY` metric.

.. _LCAMechanism_ENTROPY:

*ENTROPY* : float
the entropy of the elements in the LCAMechanism's `value <Mechanism_Base.value>`,
calculated using the `Stability` Function using the `ENTROPY <CROSS_ENTROPY>` metric.
calculated using the `Stability` Function with the `ENTROPY <CROSS_ENTROPY>` metric.

Returns
-------
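
Since the docstring above says these ports are calculated with the `Stability` Function using the `ENERGY` and `ENTROPY` metrics, a small hedged sketch of that relationship is shown here. The explicit variable and matrix are illustrative, and Energy is expected (not guaranteed) to match Stability with metric=ENERGY when both are given the same matrix:

import psyneulink as pnl

v = [0.5124973964842103, 0.5124973964842103]
stability_energy = pnl.Stability(default_variable=[0, 0], metric=pnl.ENERGY, matrix=[[0, -1], [-1, 0]])
energy = pnl.Energy(input_shapes=2, matrix=[[0, -1], [-1, 0]])
print(stability_energy(v), energy(v))   # expected to agree (~0.26265358140309386)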
@@ -533,6 +534,11 @@
"""
componentType = RECURRENT_TRANSFER_MECHANISM

standard_output_ports = TransferMechanism.standard_output_ports.copy()
standard_output_ports.extend([{NAME:ENERGY_OUTPUT_PORT_NAME}, {NAME:ENTROPY_OUTPUT_PORT_NAME}])
standard_output_port_names = TransferMechanism.standard_output_port_names.copy()
standard_output_port_names.extend([ENERGY_OUTPUT_PORT_NAME, ENTROPY_OUTPUT_PORT_NAME])

class Parameters(TransferMechanism.Parameters):
"""
Attributes
@@ -637,11 +643,6 @@
)
recurrent_projection = Parameter(None, stateful=False, loggable=False, structural=True)

standard_output_ports = TransferMechanism.standard_output_ports.copy()
standard_output_ports.extend([{NAME:ENERGY_OUTPUT_PORT_NAME}, {NAME:ENTROPY_OUTPUT_PORT_NAME}])
standard_output_port_names = TransferMechanism.standard_output_port_names.copy()
standard_output_port_names.extend([ENERGY_OUTPUT_PORT_NAME, ENTROPY_OUTPUT_PORT_NAME])

@check_user_specified
@beartype
def __init__(self,
@@ -952,9 +953,20 @@
"""
from psyneulink.library.components.projections.pathway.autoassociativeprojection import AutoAssociativeProjection

matrix = self.parameters.matrix._get(context)

# Now that matrix and default_variable size are known,
# instantiate functions for ENERGY and ENTROPY standard_output_ports
if ENERGY_OUTPUT_PORT_NAME in self.output_ports:
energy_idx = self.standard_output_port_names.index(ENERGY_OUTPUT_PORT_NAME)
self.standard_output_ports[energy_idx][FUNCTION] = Energy(self.defaults.variable,
matrix=matrix)
if ENTROPY_OUTPUT_PORT_NAME in self.output_ports:
energy_idx = self.standard_output_port_names.index(ENTROPY_OUTPUT_PORT_NAME)
self.standard_output_ports[energy_idx][FUNCTION] = Entropy(self.defaults.variable)

super()._instantiate_attributes_after_function(context=context)

matrix = self.parameters.matrix._get(context)
# (7/19/17 CW) this line of code is now questionable, given the changes to matrix and the recurrent projection
if isinstance(matrix, AutoAssociativeProjection):
self.recurrent_projection = matrix
@@ -974,23 +986,6 @@
if self.learning_enabled:
self.configure_learning(context=context)

if ENERGY_OUTPUT_PORT_NAME in self.output_ports.names:
energy = Stability(self.defaults.variable[0],
metric=ENERGY,
transfer_fct=self.function,
matrix=self.recurrent_projection._parameter_ports[MATRIX])
self.output_ports[ENERGY_OUTPUT_PORT_NAME]._calculate = energy.function

if ENTROPY_OUTPUT_PORT_NAME in self.output_ports.names:
if self.function.bounds == (0,1) or self.clip == (0,1):
entropy = Stability(self.defaults.variable[0],
metric=ENTROPY,
transfer_fct=self.function,
matrix=self.recurrent_projection._parameter_ports[MATRIX])
self.output_ports[ENTROPY_OUTPUT_PORT_NAME]._calculate = entropy.function
else:
del self.output_ports[ENTROPY_OUTPUT_PORT_NAME]

def _update_parameter_ports(self, runtime_params=None, context=None):
for port in self._parameter_ports:
# (8/2/17 CW) because the auto and hetero params are solely used by the AutoAssociativeProjection
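Taken together, the changes above let ENERGY and ENTROPY be requested as standard OutputPorts by name. A minimal usage sketch is shown below; it mirrors the new tests that follow, the mechanism and its parameters are illustrative, and the exact values depend on the mechanism's function and matrix:

import psyneulink as pnl

lca_mech = pnl.LCAMechanism(input_shapes=2, output_ports=[pnl.RESULT, pnl.ENERGY, pnl.ENTROPY])
comp = pnl.Composition(lca_mech)
comp.run(inputs=[1, 1])
for port in lca_mech.output_ports:   # expected: RESULT, ENERGY, and ENTROPY values
    print(port.name, port.value)
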
35 changes: 35 additions & 0 deletions tests/mechanisms/test_recurrent_transfer_mechanism.py
@@ -1008,6 +1008,41 @@ def test_clip_2d_array(self):
np.testing.assert_allclose(R.execute([[-5.0, -1.0, 5.0], [5.0, -5.0, 1.0], [1.0, 5.0, 5.0]]),
[[-2.0, -1.0, 2.0], [2.0, -2.0, 1.0], [1.0, 2.0, 2.0]])


class TestStandardOutputPorts:
def test_rtn_energy(self):
"""Test use of ENERGY OutputPort"""
# Get reference value
e = pnl.Energy(input_shapes=2, matrix=[[0,-1],[-1,0]])
reference = e((0.5124973964842103,0.5124973964842103))
assert reference == 0.26265358140309386

lca_mech = pnl.LCAMechanism( input_shapes=2, output_ports=[pnl.RESULT, pnl.ENERGY])
comp = pnl.Composition(lca_mech)
result = comp.run(inputs=[1,1])
energy_matrix = lca_mech.output_ports[1].function.matrix
energy_value = lca_mech.output_ports[1].value
assert (energy_matrix == [[0,-1],[-1,0]]).all()
assert energy_value == reference
assert (result[0] == [[0.5124973964842103,0.5124973964842103]]).all()
assert result[1] == reference

def test_rtn_entropy(self):
"""Test use of ENTROPY OutputPort"""
# Get reference value
e = pnl.Entropy(input_shapes=2)
reference = e((0.5124973964842103,0.5124973964842103))
assert reference == 0.6851676585231217

lca_mech = pnl.LCAMechanism( input_shapes=2, output_ports=[pnl.RESULT, pnl.ENTROPY])
comp = pnl.Composition(lca_mech)
result = comp.run(inputs=[1,1])
entropy_value = lca_mech.output_ports[1].value
assert entropy_value == reference
assert (result[0] == [[0.5124973964842103,0.5124973964842103]]).all()
assert result[1] == reference


@pytest.mark.composition
class TestRecurrentInputPort:
