% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Tetko:873632,
author = {State, Laura and Vilimelis Aceituno, Pau},
editor = {Tetko, Igor V. and Kůrková, Věra and Karpov, Pavel and
Theis, Fabian},
title = {{T}raining {D}elays in {S}piking {N}eural {N}etworks},
reportid = {FZJ-2020-00875},
isbn = {978-3-030-30486-7 (print)},
year = {2019},
abstract = {Spiking Neural Networks (SNNs) are a promising
            computational paradigm, both for understanding biological
            information processing and for low-power, embedded chips.
            Although SNNs are known to encode information in the precise
            timing of spikes, conventional artificial learning
            algorithms do not take this into account directly. In this
            work, we exploit spike timing by training the synaptic
            delays of a single-layer SNN. We use two different
            approaches: classical gradient descent and a direct
            algebraic method based on a complex-valued encoding of the
            spikes. Both approaches solve simple detection tasks
            equally well. Our work provides new optimization methods
            for the analysis of highly time-dependent data, as well as
            training methods for neuromorphic chips.},
month = {Sep},
date = {2019-09-17},
organization = {ICANN 2019: Artificial Neural Networks
                and Machine Learning – ICANN 2019:
                Theoretical Neural Computation,
                Munich (Germany), 17 Sep 2019 - 19 Sep 2019},
cin = {INM-6 / IAS-6 / INM-10},
cid = {I:(DE-Juel1)INM-6-20090406 / I:(DE-Juel1)IAS-6-20130828 /
I:(DE-Juel1)INM-10-20170113},
pnm = {574 - Theory, modelling and simulation (POF3-574) /
Smartstart - SMARTSTART Training Program in Computational
Neuroscience (90251)},
pid = {G:(DE-HGF)POF3-574 / G:(EU-Grant)90251},
typ = {PUB:(DE-HGF)1 / PUB:(DE-HGF)3},
UT = {WOS:000546494000054},
doi = {10.1007/978-3-030-30487-4_54},
url = {https://juser.fz-juelich.de/record/873632},
}
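% The abstract above describes training synaptic delays in a single-layer SNN by
% gradient descent. The %-commented Python/NumPy sketch below is an editor's toy
% illustration of that idea for a simple coincidence-detection task, kept as a
% comment so this file remains a valid .bib database. It is NOT the authors'
% published algorithm; the spike times, target arrival time, loss, and learning
% rate are all assumed for illustration.
%
% import numpy as np
%
% rng = np.random.default_rng(0)
% t_in = rng.uniform(0.0, 10.0, size=8)   # presynaptic spike times (hypothetical, ms)
% delays = np.zeros_like(t_in)            # trainable synaptic delays d_i
% t_target = 12.0                         # assumed target arrival time for coincidence
% lr = 0.1                                # assumed learning rate
%
% for step in range(200):
%     arrival = t_in + delays             # spikes reach the output neuron at t_i + d_i
%     err = arrival - t_target
%     loss = np.mean(err ** 2)            # squared coincidence error
%     grad = 2.0 * err / err.size         # dL/dd_i for the mean-squared loss
%     delays = np.clip(delays - lr * grad, 0.0, None)   # keep delays non-negative
%
% print(f"final loss: {loss:.6f}, arrival spread: {np.ptp(t_in + delays):.6f}")
%
% Under these assumptions the delays converge so that all inputs arrive almost
% simultaneously. The paper's second, algebraic approach based on a
% complex-valued encoding of the spikes is not sketched here.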