% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@ARTICLE{Miranda:917488,
      author       = {Miranda, Miro and Zabawa, Laura and Kicherer, Anna and
                      Strothmann, Laurenz and Rascher, Uwe and Roscher, Ribana},
      title        = {{D}etection of {A}nomalous {G}rapevine {B}erries {U}sing
                      {V}ariational {A}utoencoders},
      journal      = {Frontiers in Plant Science},
      volume       = {13},
      issn         = {1664-462X},
      address      = {Lausanne},
      publisher    = {Frontiers Media},
      reportid     = {FZJ-2023-00701},
      pages        = {729097},
      year         = {2022},
      abstract     = {Grapevine is one of the most economically important
                      quality crops. Monitoring plant performance during the
                      growth period is therefore important to ensure a
                      high-quality end product. This includes the observation,
                      detection, and corresponding reduction of unhealthy
                      berries (physically damaged or diseased). At harvest, it
                      is not necessary to know the exact cause of the damage,
                      but rather whether damage is apparent or not. Since
                      manual screening and selection before harvest are
                      time-consuming and expensive, we propose an automatic,
                      image-based machine learning approach that can lead
                      observers directly to anomalous areas without the need to
                      monitor every plant manually. Specifically, we train a
                      fully convolutional variational autoencoder with a
                      feature perceptual loss on images containing only healthy
                      berries and consider image areas that deviate from this
                      model as damaged berries. We use heatmaps that visualize
                      the results of the trained neural network and thereby
                      support decision making for farmers. We compare our
                      method against a convolutional autoencoder that was
                      successfully applied to a similar task and show that our
                      approach outperforms it.},
      cin          = {IBG-2},
      ddc          = {570},
      cid          = {I:(DE-Juel1)IBG-2-20101118},
      pnm          = {2171 - Biological and environmental resources for
                      sustainable use (POF4-217)},
      pid          = {G:(DE-HGF)POF4-2171},
      typ          = {PUB:(DE-HGF)16},
      pubmed       = {35720600},
      UT           = {WOS:000811892700001},
      doi          = {10.3389/fpls.2022.729097},
      url          = {https://juser.fz-juelich.de/record/917488},
}
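
% A minimal sketch, assuming a PyTorch implementation, of the kind of pipeline
% the abstract describes: a fully convolutional variational autoencoder trained
% only on healthy-berry images with a feature perceptual loss, whose per-pixel
% reconstruction error is rendered as an anomaly heatmap. The layer sizes, the
% choice of VGG16 features, the loss weights, and all names below are
% illustrative assumptions, not the authors' exact configuration.

import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.models import vgg16

class ConvVAE(nn.Module):
    def __init__(self, latent_channels=32):
        super().__init__()
        # Encoder: downsample RGB input to a latent feature map (fully convolutional).
        self.encoder = nn.Sequential(
            nn.Conv2d(3, 32, 4, stride=2, padding=1), nn.ReLU(),
            nn.Conv2d(32, 64, 4, stride=2, padding=1), nn.ReLU(),
            nn.Conv2d(64, 128, 4, stride=2, padding=1), nn.ReLU(),
        )
        self.to_mu = nn.Conv2d(128, latent_channels, 1)
        self.to_logvar = nn.Conv2d(128, latent_channels, 1)
        # Decoder: upsample the latent feature map back to image resolution.
        self.decoder = nn.Sequential(
            nn.ConvTranspose2d(latent_channels, 128, 4, stride=2, padding=1), nn.ReLU(),
            nn.ConvTranspose2d(128, 64, 4, stride=2, padding=1), nn.ReLU(),
            nn.ConvTranspose2d(64, 3, 4, stride=2, padding=1), nn.Sigmoid(),
        )

    def forward(self, x):
        h = self.encoder(x)
        mu, logvar = self.to_mu(h), self.to_logvar(h)
        # Reparameterization trick: z = mu + sigma * eps.
        z = mu + torch.exp(0.5 * logvar) * torch.randn_like(mu)
        return self.decoder(z), mu, logvar

# Frozen VGG16 feature extractor (an assumed choice) for the feature perceptual loss.
_vgg = vgg16(weights="DEFAULT").features[:16].eval()
for p in _vgg.parameters():
    p.requires_grad_(False)

def vae_loss(x, recon, mu, logvar, beta=1.0, gamma=1.0):
    # Feature perceptual loss: compare feature maps of input and reconstruction.
    perceptual = F.mse_loss(_vgg(recon), _vgg(x))
    # KL divergence of the approximate posterior from a standard normal prior.
    kld = -0.5 * torch.mean(1 + logvar - mu.pow(2) - logvar.exp())
    return gamma * perceptual + beta * kld

def anomaly_heatmap(model, x):
    # Per-pixel reconstruction error, averaged over channels, as an anomaly score map:
    # regions the healthy-only model cannot reconstruct score high.
    with torch.no_grad():
        recon, _, _ = model(x)
    return (x - recon).pow(2).mean(dim=1)

if __name__ == "__main__":
    model = ConvVAE()
    x = torch.rand(2, 3, 64, 64)           # stand-in for healthy-berry training patches
    recon, mu, logvar = model(x)
    loss = vae_loss(x, recon, mu, logvar)   # would be backpropagated during training
    heat = anomaly_heatmap(model, x)        # (2, 64, 64) heatmap for inspection
    print(loss.item(), heat.shape)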