% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Bencheikh:1037898,
author = {Bencheikh, Wadjih and Neftci, Emre and Bouhadjar, Younes},
title = {{T}raining {S}piking {N}eural {N}etworks to emulate
brain-like activity for optimal efficiency},
reportid = {FZJ-2025-01036},
year = {2024},
abstract = {Unlike neurons in Artificial Neural Networks (ANNs),
              biological neurons operate in continuous real time and thus
              possess the ability to represent timing information. They
              communicate with each other using tiny voltage pulses called
              spikes. Spikes are generated sparsely and at precise times,
              especially in early sensory regions, contributing to the
              energy efficiency of brain circuitry. Another feature of
              biological neurons is that they are equipped with an
              intrinsic memory of hundreds of milliseconds, which keeps
              track of recent activity. These operations have inspired the
              development of a novel kind of neural network (NN) known as
              spiking NNs (SNNs). In this study, we aim to identify the
              most effective combination of neuron types and learning
              methods in SNNs that yields high accuracy while minimizing
              the firing rate of neurons. We train and analyze the
              resulting spiking activity of both a feedforward and a
              recurrent network of Adaptive LIF (adLIF) neurons. Training
              of the feedforward connections between the layers includes
              training both the delays and the weights. We implemented the
              delays using 1D Dilated Convolution with Learnable Spacings
              (DCLS). The training process uses Back-Propagation Through
              Time (BPTT) with surrogate gradients. We employ a
              regularization function to control the population firing
              rate. This function penalizes deviations from a desired
              range of neuronal activity, comprising terms for hypoactivity
              (excessively low firing rates) and hyperactivity (excessively
              high firing rates). Optimal hyperparameters are chosen based
              on the highest accuracy averaged over 5 different network
              realizations. The hyperparameters include: dropout rate,
              connectivity type (feedforward or recurrent), regularization
              parameters, maximum delay, and hidden layer size. Note that
              we strictly constrain the range of values of the maximum
              allowed firing rate to reach a regime of high sparsity. We
              evaluate our network performance on classifying digits from
              the Spiking Heidelberg Digits (SHD) dataset. The dataset was
              processed using a temporal bin of 10 ms, and the training
              consists of 50 epochs. Our analysis shows that the recurrent
              network including trainable delays in the feedforward
              connections demonstrates the highest accuracy and the lowest
              firing rate (accuracy = $91.05\%$, firing rate = 1.02 Hz),
              where the firing rate is computed as the average of the
              population mean spike count per second. The corresponding
              spiking activity exhibits spike bursts. This is problematic,
              as bursting limits the use of spatiotemporal patterns,
              increases network latency in terms of information processing,
              and is difficult to represent in networks implemented on
              neuromorphic hardware. To reduce the number of bursting
              neurons, we incorporate a refractory period, which resulted
              in a firing rate of 1.56 Hz. However, this came at the cost
              of a drop in accuracy ($88.82\%$). Future work will assess
              the reasons behind this loss and devise alternative
              implementation methods and novel learning methodologies.
              Finally, we employed Spike Pattern Detection and Evaluation
              (SPADE) in our analysis. Despite our efforts, we have not yet
              identified significant recurrent spatiotemporal patterns
              within the spiking activity of the network. It remains to be
              shown in a future study whether the representations and
              patterns developed by SNNs trained by means of the BPTT
              algorithm resemble those observed in biological neural
              networks.},
month = {Apr},
date = {2024-04-23},
organization = {Neuro-Inspired Computing Elements, La
Jolla, California (USA), 23 Apr 2024 -
26 Apr 2024},
subtyp = {After Call},
cin = {PGI-15 / PGI-7},
cid = {I:(DE-Juel1)PGI-15-20210701 / I:(DE-Juel1)PGI-7-20110106},
pnm = {5234 - Emerging NC Architectures (POF4-523) / BMBF
16ME0398K - Verbundprojekt: Neuro-inspirierte Technologien
der künstlichen Intelligenz für die Elektronik der Zukunft
- NEUROTEC II - (BMBF-16ME0398K) / BMBF 16ME0399 -
Verbundprojekt: Neuro-inspirierte Technologien der
künstlichen Intelligenz für die Elektronik der Zukunft -
NEUROTEC II - (BMBF-16ME0399)},
pid = {G:(DE-HGF)POF4-5234 / G:(DE-82)BMBF-16ME0398K /
G:(DE-82)BMBF-16ME0399},
typ = {PUB:(DE-HGF)6},
url = {https://juser.fz-juelich.de/record/1037898},
}
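%
% The abstract above describes a regularization function that keeps the
% population firing rate within a desired range, with one term penalizing
% hypoactivity and one penalizing hyperactivity. The record does not give the
% exact functional form; the following Python/PyTorch sketch (kept as a
% comment so this .bib file stays valid) only illustrates the idea, with
% hypothetical names and squared-hinge penalties assumed.
%
%   import torch
%
%   def firing_rate_regularizer(spikes, dt, r_min=0.5, r_max=2.0,
%                               lambda_low=1.0, lambda_high=1.0):
%       """spikes: binary tensor (batch, time, neurons); dt: time step in s."""
%       duration = spikes.shape[1] * dt
%       # Mean spike count per second for each neuron, averaged over the batch.
%       rates = spikes.sum(dim=1).mean(dim=0) / duration   # (neurons,) in Hz
%       # Hypoactivity term: penalize neurons firing below r_min.
%       hypo = torch.clamp(r_min - rates, min=0.0).pow(2).mean()
%       # Hyperactivity term: penalize neurons firing above r_max.
%       hyper = torch.clamp(rates - r_max, min=0.0).pow(2).mean()
%       return lambda_low * hypo + lambda_high * hyper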