
Commit

Make arguments for conversion function explicit
ackurth-nc committed Sep 22, 2022
1 parent 2eddfcd commit b362062
Showing 2 changed files with 30 additions and 24 deletions.
48 changes: 27 additions & 21 deletions tutorials/end_to_end/convert_params.py
@@ -181,34 +181,40 @@ def _beta(vth, mean, std, dv_exc, du_exc):

return val

-def convert_rate_to_lif_params(**kwargs):
+def convert_rate_to_lif_params(shape_exc, dr_exc, bias_exc, shape_inh, dr_inh,
+bias_inh, g_factor, q_factor, weights, **kwargs):
'''Convert rate parameters to LIF parameters.
The mapping is based on 'A unified view on weakly correlated recurrent
networks', Grytskyy et al. 2013.
Parameters
----------
-kwargs : dict
-Parameter dictionary for rate network
+shape_exc : int
+Number of excitatory neurons in rate network
+dr_exc : float
+Integration constant for excitatory neurons in rate network
+bias_exc : float
+Bias for excitatory neurons in rate network
+shape_inh : int
+Number of inhibitory neurons in rate network
+dr_inh : float
+Integration constant for inhibitory neurons in rate network
+bias_inh : float
+Bias for inhibitory neurons in rate network
+g_factor : float
+Factor controlling inhibition-excitation balance
+q_factor : float
+Factor controlling response properties of rate network
+weights : np.ndarray
+Recurrent weights of rate network
Returns
-------
lif_network_dict : dict
Parameter dictionary for LIF network
'''
-# Fetch rate parameters.
-shape_exc = kwargs['shape_exc']
-dr_exc = kwargs['dr_exc']
-bias_exc = kwargs['bias_exc']
-
-shape_inh = kwargs['shape_inh']
-dr_inh = kwargs['dr_inh']
-bias_inh = kwargs['bias_inh']
-
-g_factor = kwargs['g_factor']
-q_factor = kwargs['q_factor']
-
-weights = kwargs['weights'].copy()
+# Copy weight parameters.
+weights_local = weights.copy()

num_neurons_exc = shape_exc
num_neurons_inh = shape_inh
@@ -246,7 +252,7 @@ def convert_rate_to_lif_params(**kwargs):
bias_inh = 5 * vth_inh * dv_inh * rel_inh_exc_bias

# Get the mean excitatory weight.
-exc_weights = weights[:, :num_neurons_exc]
+exc_weights = weights_local[:, :num_neurons_exc]
mean_exc_weight = np.mean(exc_weights)

# Perform weight conversion.
@@ -289,13 +295,13 @@ def func(weight):

# Scale weights.
if weight_scale > 0:
-weights *= weight_scale
+weights_local *= weight_scale
else:
print('Weight scaling factor not positive: No weight scaling possible')

# Scale weights with integration time step.
-weights[:, :num_neurons_exc] *= du_exc
-weights[:, num_neurons_exc:] *= du_inh
+weights_local[:, :num_neurons_exc] *= du_exc
+weights_local[:, num_neurons_exc:] *= du_inh

# Single neuron parameters.
# Bias_mant is set to make the neuron spike.
@@ -320,6 +326,6 @@ def func(weight):
network_params_lif.update(lif_params_inh)
network_params_lif['g_factor'] = g_factor
network_params_lif['q_factor'] = q_factor
-network_params_lif['weights'] = weights
+network_params_lif['weights'] = weights_local

return network_params_lif
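
For context, here is a minimal usage sketch of the new explicit signature (not part of the commit; all parameter values are illustrative placeholders, not taken from the tutorial). The rate-network parameters are now named in the signature, and callers that previously forwarded a parameter dictionary via **kwargs can keep doing so by unpacking it.

import numpy as np

# Illustrative rate-network parameters (placeholders only).
rate_params = {
    'shape_exc': 80,                   # number of excitatory neurons
    'dr_exc': 0.01,                    # integration constant, excitatory
    'bias_exc': 0.1,
    'shape_inh': 20,                   # number of inhibitory neurons
    'dr_inh': 0.01,                    # integration constant, inhibitory
    'bias_inh': 0.1,
    'g_factor': 4.0,                   # inhibition-excitation balance
    'q_factor': 1.0,                   # response-property scaling
    'weights': np.random.randn(100, 100) / 10,  # recurrent weights, excitatory columns first
}

# Unpacking the dict satisfies the explicit parameters; any extra keys still
# land in **kwargs.
lif_params = convert_rate_to_lif_params(**rate_params)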
6 changes: 3 additions & 3 deletions in the accompanying tutorial notebook
@@ -290,7 +290,7 @@
{
"cell_type": "code",
"execution_count": 6,
-"id": "fb2221aa",
+"id": "1caca08a",
"metadata": {},
"outputs": [],
"source": [
@@ -333,7 +333,7 @@
},
{
"cell_type": "markdown",
-"id": "34a1d495",
+"id": "7e500634",
"metadata": {},
"source": [
"Finally, we have to set the weights given the above constraints. To this end, we sample the weights randomly from a Gaussian distribution with zero-mean and a standard deviation that scales with the ```q_factor```."
@@ -987,7 +987,7 @@
{
"data": {
"text/plain": [
-"[<matplotlib.lines.Line2D at 0x7ff5b4fc81f0>]"
+"[<matplotlib.lines.Line2D at 0x7f56564df1f0>]"
]
},
"execution_count": 20,
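The markdown cell in the notebook diff above describes sampling the recurrent weights from a zero-mean Gaussian whose standard deviation scales with the q_factor. Below is a minimal sketch of that idea (not part of the commit); the network sizes, the 1/sqrt(N) normalization, and the sign convention for the inhibitory columns are assumptions for illustration, not the tutorial's exact code.

import numpy as np

num_exc, num_inh = 80, 20          # placeholder network sizes
q_factor, g_factor = 1.0, 4.0      # placeholder scaling factors
n_total = num_exc + num_inh

# Zero-mean Gaussian weights; the standard deviation grows with q_factor
# (the 1/sqrt(N) normalization is an assumed choice).
std = q_factor / np.sqrt(n_total)
weights = np.random.normal(loc=0.0, scale=std, size=(n_total, n_total))

# Assumed convention: excitatory columns first; inhibitory columns are negated
# and scaled by g_factor to set the inhibition-excitation balance.
weights[:, num_exc:] *= -g_factor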
