% IMPORTANT: The following is UTF-8 encoded. Non-ASCII characters in this
% file will not be handled correctly by BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@INPROCEEDINGS{Kazimi:1033913,
author = {Kazimi, Bashir and Ruzaeva, Karina and Sandfeld, Stefan},
title = {{S}elf-{S}upervised {L}earning with {G}enerative
{A}dversarial {N}etworks for {E}lectron {M}icroscopy},
publisher = {IEEE},
reportid = {FZJ-2024-06751},
pages = {71--81},
year = {2024},
abstract = {In this work, we explore the potential of self-supervised
learning with Generative Adversarial Networks (GANs) for
electron microscopy datasets. We show how self-supervised
pretraining facilitates efficient fine-tuning for a spectrum
of downstream tasks, including semantic segmentation,
denoising, noise \& background removal, and
super-resolution. Experimentation with varying model
complexities and receptive field sizes reveals the
remarkable phenomenon that fine-tuned models of lower
complexity consistently outperform more complex models with
random weight initialization. We demonstrate the versatility
of self-supervised pretraining across various downstream
tasks in the context of electron microscopy, allowing faster
convergence and better performance. We conclude that
self-supervised pretraining serves as a powerful catalyst,
being especially advantageous when limited annotated data
are available and efficient scaling of computational cost is
important.},
month = jun,
date = {2024-06-17},
booktitle = {2024 IEEE/CVF Conference on Computer Vision and
Pattern Recognition Workshops (CVPRW), Seattle (WA),
17 Jun 2024 -- 18 Jun 2024},
cin = {IAS-9},
cid = {I:(DE-Juel1)IAS-9-20201008},
pnm = {5111 - Domain-Specific Simulation \& Data Life Cycle Labs
(SDLs) and Research Groups (POF4-511)},
pid = {G:(DE-HGF)POF4-5111},
typ = {PUB:(DE-HGF)8},
UT = {WOS:001327781700008},
doi = {10.1109/CVPRW63382.2024.00012},
url = {https://juser.fz-juelich.de/record/1033913},
}
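% Usage sketch (kept commented out so this .bib file remains valid): a minimal
% LaTeX document citing the entry above via biblatex with the biber backend,
% as recommended in the header note. The file names (main.tex, references.bib)
% and the choice of the ieee biblatex style are assumptions for illustration,
% not part of the original record.
%
%   \documentclass{article}
%   \usepackage[backend=biber, style=ieee]{biblatex}
%   \addbibresource{references.bib}  % assumed name of this .bib file
%   \begin{document}
%   Self-supervised GAN pretraining for electron microscopy
%   \cite{Kazimi:1033913}.
%   \printbibliography
%   \end{document}
%
% Typical compile sequence: pdflatex main && biber main && pdflatex main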