% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

% Fixed: page range uses "--" (was "9832 - 9848"); title capitals protected by
% bracing whole words instead of single letters ({Cloud} not {C}loud); journal
% stored under its proper name, since standard styles do not recase `journal`.
% Non-standard repository fields (reportid, cin, ddc, cid, pnm, pid, typ, UT)
% are harmless ignored fields and kept as-is.
@ARTICLE{Haut:865543,
      author       = {Haut, Juan Mario and Gallardo, Jose Antonio and Paoletti,
                      Mercedes E. and Cavallaro, Gabriele and Plaza, Javier and
                      Plaza, Antonio and Riedel, Morris},
      title        = {{Cloud} {Deep} {Networks} for {Hyperspectral} {Image}
                      {Analysis}},
      journal      = {IEEE Transactions on Geoscience and Remote Sensing},
      volume       = {57},
      number       = {12},
      issn         = {1558-0644},
      address      = {New York, NY},
      publisher    = {IEEE},
      reportid     = {FZJ-2019-04921},
      pages        = {9832--9848},
      year         = {2019},
      abstract     = {Advances in remote sensing hardware have led to a
                      significantly increased capability for high-quality data
                      acquisition, which allows the collection of remotely sensed
                      images with very high spatial, spectral, and radiometric
                      resolution. This trend calls for the development of new
                      techniques to enhance the way that such unprecedented
                      volumes of data are stored, processed, and analyzed. An
                      important approach to deal with massive volumes of
                      information is data compression, related to how data are
                      compressed before their storage or transmission. For
                      instance, hyperspectral images (HSIs) are characterized by
                      hundreds of spectral bands. In this sense, high-performance
                      computing (HPC) and high-throughput computing (HTC) offer
                      interesting alternatives. Particularly, distributed
                      solutions based on cloud computing can manage and store huge
                      amounts of data in fault-tolerant environments, by
                      interconnecting distributed computing nodes so that no
                      specialized hardware is needed. This strategy greatly
                      reduces the processing costs, making the processing of high
                      volumes of remotely sensed data a natural and even cheap
                      solution. In this paper, we present a new cloud-based
                      technique for spectral analysis and compression of HSIs.
                      Specifically, we develop a cloud implementation of a popular
                      deep neural network for non-linear data compression, known
                      as autoencoder (AE). Apache Spark serves as the backbone of
                      our cloud computing environment by connecting the available
                      processing nodes using a master-slave architecture. Our
                      newly developed approach has been tested using two widely
                      available HSI data sets. Experimental results indicate that
                      cloud computing architectures offer an adequate solution for
                      managing big remotely sensed data sets.},
      cin          = {JSC},
      ddc          = {620},
      cid          = {I:(DE-Juel1)JSC-20090406},
      pnm          = {512 - Data-Intensive Science and Federated Computing
                      (POF3-512)},
      pid          = {G:(DE-HGF)POF3-512},
      typ          = {PUB:(DE-HGF)16},
      UT           = {WOS:000505701800026},
      doi          = {10.1109/TGRS.2019.2929731},
      url          = {https://juser.fz-juelich.de/record/865543},
}