001022168 001__ 1022168
001022168 005__ 20250204113759.0
001022168 0247_ $$2doi$$a10.1038/s41467-024-45063-y
001022168 0247_ $$2datacite_doi$$a10.34734/FZJ-2024-01289
001022168 0247_ $$2pmid$$a38280859
001022168 0247_ $$2WOS$$aWOS:001152430000011
001022168 037__ $$aFZJ-2024-01289
001022168 082__ $$a500
001022168 1001_ $$0P:(DE-Juel1)195747$$aSchoepe, Thorben$$b0$$eCorresponding author
001022168 245__ $$aFinding the gap: neuromorphic motion-vision in dense environments
001022168 260__ $$a[London]$$bNature Publishing Group UK$$c2024
001022168 3367_ $$2DRIVER$$aarticle
001022168 3367_ $$2DataCite$$aOutput Types/Journal article
001022168 3367_ $$0PUB:(DE-HGF)16$$2PUB:(DE-HGF)$$aJournal Article$$bjournal$$mjournal$$s1710401891_31529
001022168 3367_ $$2BibTeX$$aARTICLE
001022168 3367_ $$2ORCID$$aJOURNAL_ARTICLE
001022168 3367_ $$00$$2EndNote$$aJournal Article
001022168 520__ $$aAnimals have evolved mechanisms to travel safely and efficiently within different habitats. On a journey in dense terrains animals avoid collisions and cross narrow passages while controlling an overall course. Multiple hypotheses target how animals solve challenges faced during such travel. Here we show that a single mechanism enables safe and efficient travel. We developed a robot inspired by insects. It has remarkable capabilities to travel in dense terrain, avoiding collisions, crossing gaps and selecting safe passages. These capabilities are accomplished by a neuromorphic network steering the robot toward regions of low apparent motion. Our system leverages knowledge about vision processing and obstacle avoidance in insects. Our results demonstrate how insects might safely travel through diverse habitats. We anticipate our system to be a working hypothesis to study insects’ travels in dense terrains. Furthermore, it illustrates that we can design novel hardware systems by understanding the underlying mechanisms driving behaviour.
001022168 536__ $$0G:(DE-HGF)POF4-5234$$a5234 - Emerging NC Architectures (POF4-523)$$cPOF4-523$$fPOF IV$$x0
001022168 588__ $$aDataset connected to CrossRef, Journals: juser.fz-juelich.de
001022168 7001_ $$0P:(DE-HGF)0$$aJanotte, Ella$$b1
001022168 7001_ $$0P:(DE-HGF)0$$aMilde, Moritz B.$$b2
001022168 7001_ $$0P:(DE-HGF)0$$aBertrand, Olivier J. N.$$b3
001022168 7001_ $$0P:(DE-HGF)0$$aEgelhaaf, Martin$$b4
001022168 7001_ $$0P:(DE-HGF)0$$aChicca, Elisabetta$$b5
001022168 773__ $$0PERI:(DE-600)2553671-0$$a10.1038/s41467-024-45063-y$$gVol. 15, no. 1, p. 817$$n1$$p817$$tNature Communications$$v15$$x2041-1723$$y2024
001022168 8564_ $$uhttps://juser.fz-juelich.de/record/1022168/files/Finding%20the%20gap%3A%20neuromorphic%20motion-vision%20in%20dense%20environments.pdf$$yOpenAccess
001022168 8564_ $$uhttps://juser.fz-juelich.de/record/1022168/files/Finding%20the%20gap%3A%20neuromorphic%20motion-vision%20in%20dense%20environments.gif?subformat=icon$$xicon$$yOpenAccess
001022168 8564_ $$uhttps://juser.fz-juelich.de/record/1022168/files/Finding%20the%20gap%3A%20neuromorphic%20motion-vision%20in%20dense%20environments.jpg?subformat=icon-1440$$xicon-1440$$yOpenAccess
001022168 8564_ $$uhttps://juser.fz-juelich.de/record/1022168/files/Finding%20the%20gap%3A%20neuromorphic%20motion-vision%20in%20dense%20environments.jpg?subformat=icon-180$$xicon-180$$yOpenAccess
001022168 8564_ $$uhttps://juser.fz-juelich.de/record/1022168/files/Finding%20the%20gap%3A%20neuromorphic%20motion-vision%20in%20dense%20environments.jpg?subformat=icon-640$$xicon-640$$yOpenAccess
001022168 8767_ $$8SN-2024-00427-b$$92024-05-28$$a1200203943$$d2024-06-05$$eAPC$$jZahlung erfolgt
001022168 8767_ $$8SN-2024-00427-b$$92024-05-28$$a1200203943$$d2024-06-05$$eAPC$$jZahlung angewiesen
001022168 909CO $$ooai:juser.fz-juelich.de:1022168$$pdnbdelivery$$popenCost$$pVDB$$pdriver$$pOpenAPC$$popen_access$$popenaire
001022168 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)195747$$aForschungszentrum Jülich$$b0$$kFZJ
001022168 9131_ $$0G:(DE-HGF)POF4-523$$1G:(DE-HGF)POF4-520$$2G:(DE-HGF)POF4-500$$3G:(DE-HGF)POF4$$4G:(DE-HGF)POF$$9G:(DE-HGF)POF4-5234$$aDE-HGF$$bKey Technologies$$lNatural, Artificial and Cognitive Information Processing$$vNeuromorphic Computing and Network Dynamics$$x0
001022168 9141_ $$y2024
001022168 915pc $$0PC:(DE-HGF)0000$$2APC$$aAPC keys set
001022168 915pc $$0PC:(DE-HGF)0001$$2APC$$aLocal Funding
001022168 915pc $$0PC:(DE-HGF)0002$$2APC$$aDFG OA Publikationskosten
001022168 915pc $$0PC:(DE-HGF)0003$$2APC$$aDOAJ Journal
001022168 915pc $$0PC:(DE-HGF)0113$$2APC$$aDEAL: Springer Nature 2020
001022168 915__ $$0StatID:(DE-HGF)0561$$2StatID$$aArticle Processing Charges$$d2023-08-29
001022168 915__ $$0StatID:(DE-HGF)0113$$2StatID$$aWoS$$bScience Citation Index Expanded$$d2023-08-29
001022168 915__ $$0StatID:(DE-HGF)0700$$2StatID$$aFees$$d2023-08-29
001022168 915__ $$0StatID:(DE-HGF)0510$$2StatID$$aOpenAccess
001022168 915__ $$0StatID:(DE-HGF)1190$$2StatID$$aDBCoverage$$bBiological Abstracts$$d2023-08-29
001022168 915__ $$0LIC:(DE-HGF)CCBY4$$2HGFVOC$$aCreative Commons Attribution CC BY 4.0
001022168 915__ $$0StatID:(DE-HGF)0160$$2StatID$$aDBCoverage$$bEssential Science Indicators$$d2023-08-29
001022168 915__ $$0StatID:(DE-HGF)0100$$2StatID$$aJCR$$bNAT COMMUN : 2022$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)0200$$2StatID$$aDBCoverage$$bSCOPUS$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)0300$$2StatID$$aDBCoverage$$bMedline$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)0501$$2StatID$$aDBCoverage$$bDOAJ Seal$$d2024-01-30T07:48:07Z
001022168 915__ $$0StatID:(DE-HGF)0500$$2StatID$$aDBCoverage$$bDOAJ$$d2024-01-30T07:48:07Z
001022168 915__ $$0StatID:(DE-HGF)0030$$2StatID$$aPeer Review$$bDOAJ : Peer review$$d2024-01-30T07:48:07Z
001022168 915__ $$0StatID:(DE-HGF)0199$$2StatID$$aDBCoverage$$bClarivate Analytics Master Journal List$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)1040$$2StatID$$aDBCoverage$$bZoological Record$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)1060$$2StatID$$aDBCoverage$$bCurrent Contents - Agriculture, Biology and Environmental Sciences$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)1150$$2StatID$$aDBCoverage$$bCurrent Contents - Physical, Chemical and Earth Sciences$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)1050$$2StatID$$aDBCoverage$$bBIOSIS Previews$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)1030$$2StatID$$aDBCoverage$$bCurrent Contents - Life Sciences$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)0150$$2StatID$$aDBCoverage$$bWeb of Science Core Collection$$d2025-01-02
001022168 915__ $$0StatID:(DE-HGF)9915$$2StatID$$aIF >= 15$$bNAT COMMUN : 2022$$d2025-01-02
001022168 920__ $$lyes
001022168 9201_ $$0I:(DE-Juel1)PGI-15-20210701$$kPGI-15$$lNeuromorphic Software Eco System$$x0
001022168 9801_ $$aFullTexts
001022168 980__ $$ajournal
001022168 980__ $$aVDB
001022168 980__ $$aUNRESTRICTED
001022168 980__ $$aI:(DE-Juel1)PGI-15-20210701
001022168 980__ $$aAPC