% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
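%
% A minimal usage sketch, assuming this entry is saved as "references.bib"
% (the file name is hypothetical) and the document is processed with
% biblatex/biber, so the UTF-8 characters below are handled correctly:
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Crossmodal emotion processing \autocite{Mueller:19627}.
%   \printbibliography
%   \end{document}
%
% Typical run: pdflatex main, biber main, then pdflatex main again so the
% citation resolves.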
@ARTICLE{Mueller:19627,
author = {Müller, V.I. and Cieslik, E.C. and Turetsky, B.I. and
Eickhoff, S.B.},
title = {Crossmodal interactions in audiovisual emotion
processing},
journal = {NeuroImage},
volume = {60},
issn = {1053-8119},
address = {Orlando, Fla.},
publisher = {Academic Press},
reportid = {PreJuSER-19627},
pages = {553--561},
year = {2012},
abstract = {Emotion in daily life is often expressed in a multimodal
fashion. Consequently, emotional information from one
modality can influence processing in another. In a previous
fMRI study we assessed the neural correlates of audio-visual
integration and found that activity in the left amygdala is
significantly attenuated when a neutral stimulus is paired
with an emotional one compared to conditions where emotional
stimuli were present in both channels. Here we used dynamic
causal modelling to investigate the effective connectivity
in the neuronal network underlying this emotion presence
congruence effect. Our results provided strong evidence in
favor of a model family, differing only in the
interhemispheric interactions. All winning models share a
connection from the bilateral fusiform gyrus (FFG) into the
left amygdala and a non-linear modulatory influence of
bilateral posterior superior temporal sulcus (pSTS) on these
connections. This result indicates that the pSTS not only
integrates multi-modal information from visual and auditory
regions (as reflected in our model by significant
feed-forward connections) but also gates the influence of
the sensory information on the left amygdala, leading to
attenuation of amygdala activity when a neutral stimulus is
integrated. Moreover, we found a significant lateralization
of the FFG due to stronger driving input by the stimuli
(faces) into the right hemisphere, whereas such
lateralization was not present for sound-driven input into
the superior temporal gyrus. In summary, our data provide
further evidence for a rightward lateralization of the FFG
and in particular for a key role of the pSTS in the
integration and gating of audio-visual emotional
information.},
keywords = {Auditory Perception: physiology / Brain: physiology /
Emotions: physiology / Female / Humans / Magnetic Resonance
Imaging / Male / Visual Perception: physiology},
cin = {INM-2},
ddc = {610},
cid = {I:(DE-Juel1)INM-2-20090406},
pnm = {Funktion und Dysfunktion des Nervensystems (FUEK409) /
89571 - Connectivity and Activity (POF2-89571)},
pid = {G:(DE-Juel1)FUEK409 / G:(DE-HGF)POF2-89571},
typ = {PUB:(DE-HGF)16},
pubmed = {pmid:22182770},
UT = {WOS:000301218700057},
doi = {10.1016/j.neuroimage.2011.12.007},
url = {https://juser.fz-juelich.de/record/19627},
}
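% Note on the method named in the abstract: the "non-linear modulatory
% influence" refers to nonlinear dynamic causal modelling (DCM), in which the
% activity of one region gates the coupling between two others. A compact
% sketch of the standard nonlinear DCM state equation (notation from the DCM
% literature, not taken from this record) is:
%
%   \dot{x} = \Bigl( A + \sum_{j} u_j B^{(j)} + \sum_{k} x_k D^{(k)} \Bigr) x + C u
%
% where A holds the endogenous connections, B^{(j)} the modulation by
% experimental input u_j, D^{(k)} the gating by the activity x_k of region k
% (here: pSTS gating the FFG-to-amygdala connections), and C the driving
% inputs (here: faces into FFG, sounds into the superior temporal regions).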