Update neurons according to the activation function.
# Pick the spike-generation rule from the layer's configured activation:
# softmax layers get the dedicated softmax rule, everything else falls
# back to the linear rule.
config = self.get_config()
if config.get("activation") == "softmax":
    output_spikes = softmax_activation(self, time, updates)
else:
    output_spikes = linear_activation(self, time, updates)
After change:
# Dispatch to the spike-generation rule matching the layer's activation
# string; unknown/unlisted activations fall back to the linear rule.
# NOTE(review): the original excerpt ended with a second, orphaned
# ``else:`` duplicating the linear fallback -- invalid as written, since
# the chain below already closes with ``else``. It may be the remnant of
# an enclosing ``if`` that is not visible in this excerpt; confirm
# against the full function before relying on this reconstruction.
if self.activation_str == "softmax":
    output_spikes = softmax_activation(self, time, updates)
elif self.activation_str == "binary_sigmoid":
    output_spikes = binary_sigmoid_activation(self, time, updates)
elif self.activation_str == "binary_tanh":
    output_spikes = binary_tanh_activation(self, time, updates)
else:
    output_spikes = linear_activation(self, time, updates)
# Store refractory state.  (``#`` replaces the original ``//``, which is
# not a Python comment and would be a syntax error here.)
# Neurons that spiked this step cannot fire again until
# ``time + self.tau_refrac``.
new_refractory = T.set_subtensor(
    self.refrac_until[output_spikes.nonzero()], time + self.tau_refrac)
updates.append((self.refrac_until, new_refractory))
# Record ``time + dt`` for neurons that spiked (zero elsewhere) and
# accumulate per-neuron spike counts.
updates.append((self.spiketrain, output_spikes * (time + settings["dt"])))
updates.append((self.spikecounts, self.spikecounts + output_spikes))