000896737 001__ 896737
000896737 005__ 20210930133542.0
000896737 0247_ $$2doi$$a10.3389/fpsyg.2021.673982
000896737 0247_ $$2Handle$$a2128/28674
000896737 0247_ $$2altmetric$$aaltmetric:111348368
000896737 0247_ $$2pmid$$a34421731
000896737 0247_ $$2WOS$$aWOS:000687058500001
000896737 037__ $$aFZJ-2021-03562
000896737 041__ $$aEnglish
000896737 082__ $$a150
000896737 1001_ $$0P:(DE-HGF)0$$aHartz, Arne$$b0
000896737 245__ $$aTemporal Behavioral Parameters of On-Going Gaze Encounters in a Virtual Environment
000896737 260__ $$aLausanne$$bFrontiers Research Foundation$$c2021
000896737 3367_ $$2DRIVER$$aarticle
000896737 3367_ $$2DataCite$$aOutput Types/Journal article
000896737 3367_ $$0PUB:(DE-HGF)16$$2PUB:(DE-HGF)$$aJournal Article$$bjournal$$mjournal$$s1632223411_30180
000896737 3367_ $$2BibTeX$$aARTICLE
000896737 3367_ $$2ORCID$$aJOURNAL_ARTICLE
000896737 3367_ $$00$$2EndNote$$aJournal Article
000896737 520__ $$aTo navigate the social world, humans rely heavily on gaze for non-verbal communication, as it conveys information in a highly dynamic and complex, yet concise manner: for instance, humans use gaze effortlessly to direct and infer the attention of a possible interaction partner. However, many traditional paradigms in social gaze research rely on static ways of assessing gaze interaction, e.g., by using images or prerecorded videos as stimulus material. Emerging gaze-contingent paradigms, in which algorithmically controlled virtual characters can respond flexibly to the gaze behavior of humans, provide high ecological validity. Ideally, these are based on models of human behavior that allow for a precise, parameterized characterization of behavior and include variable interactive settings and different communicative states of the interacting agents. The present study provides a complete definition and empirical description of a behavioral parameter space of human gaze behavior in extended gaze encounters. To this end, we (i) modeled a shared 2D virtual environment on a computer screen in which a human could interact via gaze with an agent while objects were presented simultaneously to create instances of joint attention, and (ii) quantitatively determined the free model parameters (temporal and probabilistic) of behavior within this environment to provide a first complete, detailed description of the behavioral parameter space governing joint attention. This knowledge is essential for modeling interacting agents with a high degree of ecological validity, be it for cognitive studies or applications in human-robot interaction.
000896737 536__ $$0G:(DE-HGF)POF4-5251$$a5251 - Multilevel Brain Organization and Variability (POF4-525)$$cPOF4-525$$fPOF IV$$x0
000896737 588__ $$aDataset connected to CrossRef, Journals: juser.fz-juelich.de
000896737 7001_ $$0P:(DE-HGF)0$$aGuth, Björn$$b1
000896737 7001_ $$0P:(DE-Juel1)177842$$aJording, Mathis$$b2
000896737 7001_ $$0P:(DE-Juel1)176404$$aVogeley, Kai$$b3$$ufzj
000896737 7001_ $$0P:(DE-Juel1)131741$$aSchulte-Rüther, Martin$$b4$$eCorresponding author
000896737 773__ $$0PERI:(DE-600)2563826-9$$a10.3389/fpsyg.2021.673982$$gVol. 12, p. 673982$$p673982$$tFrontiers in psychology$$v12$$x1664-1078$$y2021
000896737 8564_ $$uhttps://juser.fz-juelich.de/record/896737/files/Hartz_2021_Front%20Psychol_Temporal%20behavioral%20parameters....pdf$$yOpenAccess
000896737 909CO $$ooai:juser.fz-juelich.de:896737$$pdnbdelivery$$pdriver$$pVDB$$popen_access$$popenaire
000896737 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)177842$$aForschungszentrum Jülich$$b2$$kFZJ
000896737 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)176404$$aForschungszentrum Jülich$$b3$$kFZJ
000896737 9101_ $$0I:(DE-588b)5008462-8$$6P:(DE-Juel1)131741$$aForschungszentrum Jülich$$b4$$kFZJ
000896737 9131_ $$0G:(DE-HGF)POF4-525$$1G:(DE-HGF)POF4-520$$2G:(DE-HGF)POF4-500$$3G:(DE-HGF)POF4$$4G:(DE-HGF)POF$$9G:(DE-HGF)POF4-5251$$aDE-HGF$$bKey Technologies$$lNatural, Artificial and Cognitive Information Processing$$vDecoding Brain Organization and Dysfunction$$x0
000896737 9141_ $$y2021
000896737 915__ $$0StatID:(DE-HGF)0200$$2StatID$$aDBCoverage$$bSCOPUS$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0160$$2StatID$$aDBCoverage$$bEssential Science Indicators$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0130$$2StatID$$aDBCoverage$$bSocial Sciences Citation Index$$d2021-01-30
000896737 915__ $$0LIC:(DE-HGF)CCBY4$$2HGFVOC$$aCreative Commons Attribution CC BY 4.0
000896737 915__ $$0StatID:(DE-HGF)0100$$2StatID$$aJCR$$bFRONT PSYCHOL : 2019$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)1180$$2StatID$$aDBCoverage$$bCurrent Contents - Social and Behavioral Sciences$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0501$$2StatID$$aDBCoverage$$bDOAJ Seal$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0500$$2StatID$$aDBCoverage$$bDOAJ$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)1110$$2StatID$$aDBCoverage$$bCurrent Contents - Clinical Medicine$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0700$$2StatID$$aFees$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)9900$$2StatID$$aIF < 5$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0510$$2StatID$$aOpenAccess
000896737 915__ $$0StatID:(DE-HGF)0030$$2StatID$$aPeer Review$$bDOAJ : Blind peer review$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0561$$2StatID$$aArticle Processing Charges$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0300$$2StatID$$aDBCoverage$$bMedline$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0320$$2StatID$$aDBCoverage$$bPubMed Central$$d2021-01-30
000896737 915__ $$0StatID:(DE-HGF)0199$$2StatID$$aDBCoverage$$bClarivate Analytics Master Journal List$$d2021-01-30
000896737 920__ $$lyes
000896737 9201_ $$0I:(DE-Juel1)INM-3-20090406$$kINM-3$$lKognitive Neurowissenschaften$$x0
000896737 980__ $$ajournal
000896737 980__ $$aVDB
000896737 980__ $$aUNRESTRICTED
000896737 980__ $$aI:(DE-Juel1)INM-3-20090406
000896737 9801_ $$aFullTexts