Skip to content

Commit

Permalink
Merge pull request #100 from synsense/hotfix/99_snn_analyzer
Browse files Browse the repository at this point in the history
Fix issue #99, add unit test
  • Loading branch information
biphasic authored Jul 1, 2023
2 parents 5bcfd51 + edaf802 commit df3a113
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 1 deletion.
2 changes: 1 addition & 1 deletion sinabs/synopcounter.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ def _setup_hooks(self):
unflattened_shape = (layer.batch_size, layer.num_timesteps)

for layer in self.model.modules():
if isinstance(layer, sl.StatefulLayer):
if isinstance(layer, sl.StatefulLayer) and layer.does_spike:
layer.acc_output = torch.tensor(0)
layer.n_batches = 0
handle = layer.register_forward_hook(spiking_hook)
Expand Down
20 changes: 20 additions & 0 deletions tests/test_synops_counter.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,26 @@ def test_spiking_layer_firing_rate():
assert layer_stats["firing_rate_per_neuron"].mean() == 0.25


def test_nonspiking_stateful_layer():
    """Regression test for issue #99.

    A stateful but non-spiking layer (``ExpLeak``) must be excluded from
    ``SNNAnalyzer``'s spiking statistics; only layers with
    ``does_spike == True`` should be hooked and reported.
    """
    model = nn.Sequential(sl.IAF(), sl.ExpLeak(tau_mem=10))
    # 4x4 identity, shaped (batch=1, time=1, 4, 4): 4 spikes over 16 neurons
    # gives an expected mean firing rate of 0.25.
    input_ = torch.eye(4).unsqueeze(0).unsqueeze(0)

    analyzer = sinabs.SNNAnalyzer(model)
    # Forward pass is needed only for its side effect of populating the
    # analyzer's hooks; the output itself is not used.
    model(input_)
    model_stats = analyzer.get_model_statistics(average=True)
    assert model_stats["firing_rate"] == 0.25

    layer_stats = analyzer.get_layer_statistics(average=True)
    # The ExpLeak layer (module index "1") should not show up in spiking
    # or parameter stats.
    assert "1" not in layer_stats["spiking"]
    assert "1" not in layer_stats["parameter"]

    spiking_layer_stats = layer_stats["spiking"]["0"]
    assert spiking_layer_stats["firing_rate"] == 0.25
    assert spiking_layer_stats["firing_rate_per_neuron"].shape == (4, 4)
    assert spiking_layer_stats["firing_rate_per_neuron"].mean() == 0.25


def test_spiking_layer_firing_rate_across_batches():
layer = sl.IAF()
input1 = torch.eye(4).unsqueeze(0).unsqueeze(0)
Expand Down

0 comments on commit df3a113

Please sign in to comment.