% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Oberstra:1031453,
author = {Oberstraß, Alexander and DeKraker, Jordan and Münzing,
Sascha and Evans, Alan C. and Axer, Markus and Amunts,
Katrin and Dickscheid, Timo},
title = {{A}nalyzing {R}egional {O}rganization of the {H}uman
{H}ippocampus in 3{D}-{PLI} {U}sing {C}ontrastive {L}earning
and {G}eometric {U}nfolding},
school = {Heinrich-Heine-University Düsseldorf},
reportid = {FZJ-2024-05672},
year = {2024},
abstract = {Quantifiable and interpretable descriptors of nerve fiber
architecture at microscopic resolution are an important
basis for a deeper understanding of human brain
architecture. 3D polarized light imaging (3D-PLI) provides
detailed insights into the course and geometry of nerve
fibers in whole postmortem brain sections, represented in
large datasets. The large amounts of data, combined with
complex textures in 3D-PLI images, however, make analysis
challenging and limit the availability of annotations. To
address this, we propose using self-supervised contrastive
learning
to extract deep texture features for fiber architecture in
3D-PLI. We use the texture features to analyze the regional
organization of the human hippocampus in combination with
geometric unfolding to reduce the effects of its folded
topology and project the features to a canonical reference
space. We analyze the fiber architecture of a human
hippocampus of an 87-year-old male, measured with a
polarizing microscope (PM) at 1.3 µm in-plane resolution on
60 µm thick brain sections. The volume comprises 545 brain
sections, each 26757 × 22734 pixels in size. We apply
contrastive learning to learn robust and descriptive
representations by contrasting similar (positive) and
dissimilar (negative) pairs of texture examples. Here, we
leverage the volume reconstruction of individual brain
sections in the learning objective to identify positive
pairs based on a fixed distance between example image
patches either in-plane (CL-2D) or across brain sections in
3D (CL-3D) (Fig. 1A). The objective is used to train a
width-reduced ResNet-50 architecture on the full hippocampus,
extracting 256 texture features for square patches of 128 × 128
pixels (166 µm edge length). After training, inference is
performed using a sliding-window approach to generate feature
maps for whole brain sections (Fig. 1B). To
analyze the folded architecture of the hippocampus, we apply
HippUnfold and sample features from the feature maps at
multiple depths of the pyramidal layer of the hippocampal
Cornu ammonis (CA) region and the subicular complex (Fig.
1C). Subsequently, PCA is performed to reduce feature
dimensionality for visualization and improve computational
stability in further analysis (Fig. 1E). To assess how well the
deep texture features reflect the regional organization of the
hippocampus, we perform k-means clustering with 6 clusters and
compare the results with subfield labels.
Clusters in CL-3D features show good visual agreement with the
hippocampal CA1-CA4 regions and the subicular complex. In terms
of mutual information with the subfield labels, CL-3D clusters
(0.72) align more closely than clusterings of baseline
characterizations based on fractional anisotropy and mean
transmittance (0.40) or of CL-2D features (0.61). Without any
supervisory signal, CL-3D features form a well-structured
embedding space that follows the general regional organization
pattern of the hippocampus and additionally highlights an
expected functional rostro-caudal heterogeneity. Projecting deep texture
features to unfolded space using HippUnfold enables
subsequent comparison with diverse modalities. This work
thus lays the foundation for incorporating 3D-PLI texture
information into a comprehensive multimodal mapping of the
human hippocampus.},
month = {Sep},
date = {2024-09-09},
organization = {8th BigBrain Workshop, Padua (Italy),
9 Sep 2024 - 11 Sep 2024},
subtyp = {After Call},
cin = {INM-1},
cid = {I:(DE-Juel1)INM-1-20090406},
pnm = {5254 - Neuroscientific Data Analytics and AI (POF4-525) /
HIBALL - Helmholtz International BigBrain Analytics and
Learning Laboratory (HIBALL) (InterLabs-0015) / EBRAINS 2.0
- EBRAINS 2.0: A Research Infrastructure to Advance
Neuroscience and Brain Health (101147319) / Helmholtz AI -
Helmholtz Artificial Intelligence Coordination Unit –
Local Unit FZJ (E.40401.62)},
pid = {G:(DE-HGF)POF4-5254 / G:(DE-HGF)InterLabs-0015 /
G:(EU-Grant)101147319 / G:(DE-Juel-1)E.40401.62},
typ = {PUB:(DE-HGF)6},
url = {https://juser.fz-juelich.de/record/1031453},
}
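
% A minimal sketch of the pairing strategy described in the abstract
% (positive pairs at a fixed distance, in-plane for CL-2D or across
% sections for CL-3D) combined with a SimCLR-style NT-Xent objective.
% PyTorch is assumed; the exact loss, the offsets and the helper names
% sample_positive_pair/nt_xent are not given in the record and are
% placeholders, not the authors' implementation.
%
%   import torch
%   import torch.nn.functional as F
%
%   def sample_positive_pair(volume, z, y, x, patch=128, dist=128, mode="3d"):
%       """Cut an anchor patch and a positive patch at a fixed offset in the
%       reconstructed (sections, H, W) volume; indices are assumed valid."""
%       anchor = volume[z, y:y + patch, x:x + patch]
%       if mode == "3d":                      # CL-3D: neighbouring section
%           positive = volume[z + 1, y:y + patch, x:x + patch]
%       else:                                 # CL-2D: fixed in-plane offset
%           positive = volume[z, y + dist:y + dist + patch, x:x + patch]
%       return anchor, positive
%
%   def nt_xent(z1, z2, temperature=0.1):
%       """NT-Xent loss; z1[i] and z2[i] embed a positive pair, all other
%       examples in the batch act as negatives."""
%       z = F.normalize(torch.cat([z1, z2]), dim=1)      # (2N, D)
%       sim = torch.matmul(z, z.T) / temperature         # cosine similarities
%       sim.fill_diagonal_(float("-inf"))                # drop self-similarity
%       n = z1.shape[0]
%       targets = torch.cat([torch.arange(n, 2 * n), torch.arange(n)])
%       return F.cross_entropy(sim, targets)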
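%
% A sketch of the sliding-window inference that turns the trained encoder
% into section-wide feature maps (Fig. 1B in the abstract). The stride,
% the single-channel input and the name encoder (standing for the
% width-reduced ResNet-50) are assumptions.
%
%   import torch
%
%   def feature_map(section, encoder, patch=128, stride=128, n_features=256):
%       """Slide a window over one single-channel section image (H, W) and
%       collect encoder outputs into a coarse (H//stride, W//stride, 256)
%       feature map; in practice the tiles would be batched for speed."""
%       H, W = section.shape
%       rows = []
%       with torch.no_grad():
%           for y in range(0, H - patch + 1, stride):
%               row = []
%               for x in range(0, W - patch + 1, stride):
%                   tile = section[y:y + patch, x:x + patch]
%                   tile = tile.reshape(1, 1, patch, patch).float()
%                   row.append(encoder(tile).reshape(n_features))
%               rows.append(torch.stack(row))
%       return torch.stack(rows)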
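%
% A sketch of the unsupervised evaluation, assuming scikit-learn: PCA for
% dimensionality reduction, k-means with 6 clusters and mutual information
% against the subfield labels. The number of PCA components and the exact
% mutual-information variant are not stated in the abstract and are
% assumptions.
%
%   from sklearn.cluster import KMeans
%   from sklearn.decomposition import PCA
%   from sklearn.metrics import mutual_info_score
%
%   def cluster_and_compare(features, subfield_labels,
%                           n_components=10, n_clusters=6):
%       """Cluster PCA-reduced texture features and score the agreement of
%       the clusters with reference subfield labels."""
%       reduced = PCA(n_components=n_components).fit_transform(features)
%       clusters = KMeans(n_clusters=n_clusters, n_init=10,
%                         random_state=0).fit_predict(reduced)
%       return mutual_info_score(subfield_labels, clusters)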