% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
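%
% For reference, a minimal LaTeX document that consumes this file via biblatex
% with the “biber” backend (a sketch; the file name “references.bib” below is
% an assumption, so rename it to match wherever you save this record):
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}  % biber handles UTF-8 natively
%   \addbibresource{references.bib}       % this record, saved under an assumed name
%   \begin{document}
%   Atmospheric correction as a contrastive view \cite{Patnala:1008330}.
%   \printbibliography
%   \end{document}
%
% Compile with pdflatex, then biber, then pdflatex again to resolve the citation.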

@ARTICLE{Patnala:1008330,
      author       = {Patnala, Ankit and Stadtler, Scarlet and Schultz, Martin G.
                      and Gall, Juergen},
      title        = {{G}enerating {V}iews {U}sing {A}tmospheric {C}orrection for
                      {C}ontrastive {S}elf-{S}upervised {L}earning of
                      {M}ultispectral {I}mages},
      journal      = {IEEE Geoscience and Remote Sensing Letters},
      volume       = {20},
      number       = {2502305},
      issn         = {1545-598X},
      address      = {New York, NY},
      publisher    = {IEEE},
      reportid     = {FZJ-2023-02292},
      pages        = {1--5},
      year         = {2023},
      abstract     = {In remote sensing, a large number of multispectral images
                      are publicly available from various land-cover satellite
                      missions. Contrastive self-supervised learning is commonly
                      applied to unlabeled data but relies on domain-specific
                      transformations used for learning. When focusing on
                      vegetation, standard transformations from image processing
                      cannot be applied to the near-infrared (NIR) channel, which
                      carries valuable information about the vegetation state.
                      Therefore, we use contrastive learning, relying on different
                      views of unlabeled multispectral images to obtain a
                      pretrained model that improves accuracy on
                      small-sized remote sensing datasets. This study presents the
                      generation of additional views tailored to remote sensing
                      images using atmospheric correction as an alternative
                      transformation to color jittering. The purpose of the
                      atmospheric correction is to provide a physically
                      consistent transformation. The proposed transformation can
                      be easily integrated with multiple channels to exploit
                      spectral signatures of objects. Our approach can be applied
                      to other remote sensing tasks. Using this transformation
                      leads to improved classification accuracy of up to 6\%.},
      cin          = {JSC},
      ddc          = {550},
      cid          = {I:(DE-Juel1)JSC-20090406},
      pnm          = {5111 - Domain-Specific Simulation \& Data Life Cycle Labs
                      (SDLs) and Research Groups (POF4-511) / Deep Learning for
                      Air Quality and Climate Forecasts (deepacf\_20191101) /
                      Earth System Data Exploration (ESDE) / AI Strategy for Earth
                      system data (kiste\_20200501)},
      pid          = {G:(DE-HGF)POF4-5111 / G:(DE-Juel1)deepacf\_20191101 /
                      G:(DE-Juel-1)ESDE / G:(DE-Juel1)kiste\_20200501},
      typ          = {PUB:(DE-HGF)16},
      UT           = {WOS:000995888700003},
      doi          = {10.1109/LGRS.2023.3274493},
      url          = {https://juser.fz-juelich.de/record/1008330},
}