% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
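%
% A minimal usage sketch (not part of the exported record), assuming a
% biblatex document compiled with the biber backend; "references.bib" is a
% placeholder for whatever name this file is saved under:
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   Germination detection dataset~\cite{Scharr:889315}.
%   \printbibliography
%   \end{document}
%
% Run pdflatex, then biber, then pdflatex again to resolve the citation.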

@INPROCEEDINGS{Scharr:889315,
      author       = {Scharr, Hanno and Bruns, Benjamin and Fischbach, Andreas
                      and Roussel, Johanna and Scholtes, Lukas and vom Stein,
                      Jonas},
      title        = {{G}ermination {D}etection of {S}eedlings in {S}oil: {A}
                      {S}ystem, {D}ataset and {C}hallenge},
      volume       = {12540},
      address      = {Cham},
      publisher    = {Springer},
      reportid     = {FZJ-2021-00207},
      series       = {Lecture Notes in Computer Science},
      pages        = {360--374},
      year         = {2020},
      booktitle    = {Computer Vision – ECCV 2020 Workshops},
      abstract     = {In phenotyping experiments plants are often germinated in
                      high numbers, and in a manual transplantation step selected
                      and moved to single pots. Selection is based on visually
                      derived germination date, visual size, or health inspection.
                      Such values are often inaccurate, as evaluating thousands of
                      tiny seedlings is tiring. We address these issues by
                      quantifying germination detection with an automated,
                      imaging-based device, and by a visual support system for
                      inspection and transplantation. While this is a great help
                      and reduces the need for visual inspection, accuracy of
                      seedling detection is not yet sufficient to allow skipping
                      the inspection step. We therefore present a new dataset and
                      challenge containing 19.5k images taken by our germination
                      detection system and manually verified labels. We describe
                      in detail the involved automated system and handling setup.
                      As baseline we report the performances of the currently
                      applied color-segmentation based algorithm and of five
                      transfer-learned deep neural networks.},
      month        = {Aug},
      date         = {2020-08-23},
      organization = {16th European Conference on Computer
                       Vision, Glasgow, UK, 23 Aug 2020 -
                       28 Aug 2020},
      cin          = {IBG-2},
      cid          = {I:(DE-Juel1)IBG-2-20101118},
      pnm          = {583 - Innovative Synergisms (POF3-583) / 582 - Plant
                      Science (POF3-582)},
      pid          = {G:(DE-HGF)POF3-583 / G:(DE-HGF)POF3-582},
      typ          = {PUB:(DE-HGF)8 / PUB:(DE-HGF)7},
      UT           = {WOS:001500596600025},
      doi          = {10.1007/978-3-030-65414-6_25},
      url          = {https://juser.fz-juelich.de/record/889315},
}