Skip to content

Commit

Permalink
rm unused imports
Browse files Browse the repository at this point in the history
  • Loading branch information
weidel-p committed Feb 15, 2023
1 parent efacdd4 commit d6e057f
Show file tree
Hide file tree
Showing 3 changed files with 7 additions and 7 deletions.
2 changes: 1 addition & 1 deletion src/lava/magma/core/model/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

import typing as ty
import logging
from abc import ABC, abstractmethod
from abc import ABC

if ty.TYPE_CHECKING:
from lava.magma.core.process.process import AbstractProcess
Expand Down
8 changes: 4 additions & 4 deletions src/lava/magma/core/model/py/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,21 +116,21 @@ def _get_var(self):
data_port = self.process_to_service
# Header corresponds to number of values
# Data is either sent once (for int) or one by one (array)
if isinstance(var, int) or isinstance(var, np.integer):
if isinstance(var, int) or isinstance(var, np.int32):
data_port.send(enum_to_np(1))
data_port.send(enum_to_np(var))
elif isinstance(var, np.ndarray):
# FIXME: send a whole vector (also runtime_service.py)
var_iter = np.nditer(var, order="C")
num_items: np.integer = np.prod(var.shape)
num_items: np.int32 = np.prod(var.shape)
data_port.send(enum_to_np(num_items))
for value in var_iter:
data_port.send(enum_to_np(value, np.float64))
elif isinstance(var, str):
encoded_str = list(var.encode("ascii"))
data_port.send(enum_to_np(len(encoded_str)))
for ch in encoded_str:
data_port.send(enum_to_np(ch, d_type=np.integer))
data_port.send(enum_to_np(ch, d_type=np.int32))

def _set_var(self):
"""Handles the set Var command from runtime service."""
Expand All @@ -141,7 +141,7 @@ def _set_var(self):

# 2. Receive Var data
data_port = self.service_to_process
if isinstance(var, int) or isinstance(var, np.integer):
if isinstance(var, int) or isinstance(var, np.int32):
# First item is number of items (1) - not needed
data_port.recv()
# Data to set
Expand Down
4 changes: 2 additions & 2 deletions tests/lava/magma/core/learning/test_learning_rule.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,15 +60,15 @@ def create_network(
dense_inp = Dense(weights=np.eye(size, size) * 2.0)

lif_0 = LIF(
shape=(size,), du=du, dv=dv, vth=vth, bias_mant=0.0, name="lif_pre"
shape=(size,), du=du, dv=dv, vth=vth, bias_mant=0, name="lif_pre"
)

dense = LearningDense(
weights=weights_init, learning_rule=learning_rule, name="plastic_dense"
)

lif_1 = LIF(
shape=(size,), du=du, dv=dv, vth=vth, bias_mant=0.0, name="lif_post"
shape=(size,), du=du, dv=dv, vth=vth, bias_mant=0, name="lif_post"
)

spike_gen.s_out.connect(dense_inp.s_in)
Expand Down

0 comments on commit d6e057f

Please sign in to comment.