% IMPORTANT: This file is UTF-8 encoded. If it contains non-ASCII characters,
% it will not work with BibTeX 0.99 or older; use an up-to-date BibTeX
% implementation such as “bibtex8” or “biber” instead.
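%
% For example, a document using the "biblatex" package with the "biber" backend
% can load this file as follows (a minimal sketch; the file name
% "references.bib" and the \cite placement are assumptions, not part of this
% record):
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Attention analysis for multi-year LC mapping \cite{Sedona:1017949}.
%   \printbibliography
%   \end{document}
%
% Typical build sequence: pdflatex, then biber, then pdflatex twice.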

@INPROCEEDINGS{Sedona:1017949,
      author       = {Sedona, Rocco and Ebert, Jan and Paris, Claudia and Riedel,
                      Morris and Cavallaro, Gabriele},
      title        = {{Enhancing Training Set Through Multi-Temporal Attention
                      Analysis in Transformers for Multi-Year Land Cover
                      Mapping}},
      booktitle    = {IEEE International Geoscience and Remote Sensing
                      Symposium (IGARSS)},
      publisher    = {IEEE},
      reportid     = {FZJ-2023-04454},
      pages        = {5411--5414},
      year         = {2023},
      abstract     = {The continuous stream of high spatial resolution satellite
                      data offers the opportunity to regularly produce land cover
                      (LC) maps. To this end, Transformer deep learning (DL)
                      models have recently proven their effectiveness in
                      accurately classifying long time series (TS) of satellite
                      images. The continual generation of regularly updated LC
                      maps can be used to analyze dynamic phenomena and extract
                      multi-temporal information. However, several challenges need
                      to be addressed. Our paper studies how the performance of a
                      Transformer model changes when classifying TS of satellite
                      images acquired in later years than those covered by the
                      training set. In particular, the behavior of the attention
                      mechanism in the Transformer model is analyzed to determine
                      when the information provided by the initial training set
                      needs to be updated to keep generating accurate LC
                      products. Preliminary
                      results show that: (i) the selection of the positional
                      encoding strategy used in the Transformer has a significant
                      impact on the classification accuracy obtained with
                      multi-year TS, and (ii) the most affected classes are the
                      seasonal ones.},
      month        = {Jul},
      date         = {2023-07-16},
      organization = {IEEE International Geoscience and Remote Sensing
                      Symposium (IGARSS), Pasadena (CA), 16 Jul 2023 - 21 Jul
                      2023},
      cin          = {JSC},
      cid          = {I:(DE-Juel1)JSC-20090406},
      pnm          = {5111 - Domain-Specific Simulation \& Data Life Cycle Labs
                      (SDLs) and Research Groups (POF4-511) / RAISE - Research on
                      AI- and Simulation-Based Engineering at Exascale (951733) /
                      EUROCC-2 (DEA02266)},
      pid          = {G:(DE-HGF)POF4-5111 / G:(EU-Grant)951733 /
                      G:(DE-Juel-1)DEA02266},
      typ          = {PUB:(DE-HGF)8},
      UT           = {WOS:001098971605148},
      doi          = {10.1109/IGARSS52108.2023.10283284},
      url          = {https://juser.fz-juelich.de/record/1017949},
}