% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Neftci:1025210,
      author       = {Neftci, Emre and Yu, Zhenming and Leroux, Nathan},
      title        = {{T}raining-to-{L}earn with {M}emristive {D}evices},
      reportid     = {FZJ-2024-02778},
      year         = {2023},
      abstract     = {Memristive crossbar arrays are promising non-von Neumann
                      computing technologies to enable real-world, online
                      learning in neural networks. However, their deployment to
                      real-world learning problems is hindered by their
                      non-linearities in conductance updates, variation during
                      operation, fabrication mismatch and the realities of
                      gradient descent training. In this work, we show that,
                      with a phenomenological model of the device and bi-level
                      optimization, it is possible to pre-train the neural
                      network to be largely insensitive to such non-idealities
                      on learning tasks. We demonstrate this effect using Model
                      Agnostic Meta Learning (MAML) and a differentiable model
                      of the conductance update on the Omniglot few-shot
                      learning task. Since pre-training is a necessary procedure
                      for any online learning scenario at the edge, our results
                      may pave the way towards real-world applications of
                      memristive devices without significant adaptation
                      overhead.},
      month        = {Jan},
      date         = {2023-01-23},
      organization = {Neuromorphic Materials, Devices, Circuits and Systems,
                      València (Spain), 23 Jan 2023 - 25 Jan 2023},
      subtyp       = {Invited},
      cin          = {PGI-15},
      cid          = {I:(DE-Juel1)PGI-15-20210701},
      pnm          = {5234 - Emerging NC Architectures (POF4-523)},
      pid          = {G:(DE-HGF)POF4-5234},
      typ          = {PUB:(DE-HGF)6},
      doi          = {10.29363/nanoge.neumatdecas.2023.013},
      url          = {https://juser.fz-juelich.de/record/1025210},
}