% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
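%
% A minimal usage sketch (not part of the exported record): assuming this file
% is saved as "references.bib", a biblatex/biber setup that handles the UTF-8
% characters in the entry below could look like the following.
%
%   \documentclass{article}
%   \usepackage[backend=biber]{biblatex}
%   \addbibresource{references.bib}
%   \begin{document}
%   EAP4EMSIG is described in \cite{Friederich:1037178}.
%   \printbibliography
%   \end{document}
%
% Compile with pdflatex, then biber, then pdflatex again.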

@ARTICLE{Friederich:1037178,
      author       = {Friederich, Nils and Yamachui Sitcheu, A. J. and Nassal,
                      Annika and Pesch, Matthias and Yildiz, Erenus and Beichter,
                      Maximilian and Scholtes, Lukas and Akbaba, Bahar and
                      Lautenschlager, Thomas and Neumann, Oliver and Kohlheyer,
                      Dietrich and Scharr, Hanno and Seiffarth, Johannes and Nöh,
                      Katharina and Mikut, Ralf},
      title        = {{EAP}4{EMSIG} -- {E}xperiment {A}utomation {P}ipeline for
                      {E}vent-{D}riven {M}icroscopy to {S}mart {M}icrofluidic
                      {S}ingle-{C}ells {A}nalysis},
      publisher    = {arXiv},
      reportid     = {FZJ-2025-00523},
      year         = {2024},
      note         = {arXiv, arXiv:2411.05030 [q-bio.QM]},
      abstract     = {Microfluidic Live-Cell Imaging (MLCI) generates
                      high-quality data that allows biotechnologists to study
                      cellular growth dynamics in detail. However, obtaining these
                      continuous data over extended periods is challenging,
                      particularly in achieving accurate and consistent real-time
                      event classification at the intersection of imaging and
                      stochastic biology. To address this issue, we introduce the
                      Experiment Automation Pipeline for Event-Driven Microscopy
                      to Smart Microfluidic Single-Cells Analysis (EAP4EMSIG). In
                      particular, we present initial zero-shot results from the
                      real-time segmentation module of our approach. Our findings
                      indicate that among four State-Of-The-Art (SOTA)
                      segmentation methods evaluated, Omnipose delivers the
                      highest Panoptic Quality (PQ) score of 0.9336, while Contour
                      Proposal Network (CPN) achieves the fastest inference time
                      of 185 ms with the second-highest PQ score of 0.8575.
                      Furthermore, we observed that the vision foundation model
                      Segment Anything is unsuitable for this particular use
                      case.},
      keywords     = {Quantitative Methods (q-bio.QM) (Other) / Computer Vision
                      and Pattern Recognition (cs.CV) (Other) / Image and Video
                      Processing (eess.IV) (Other) / FOS: Biological sciences
                      (Other) / FOS: Computer and information sciences (Other) /
                      FOS: Electrical engineering, electronic engineering,
                      information engineering (Other)},
      cin          = {IBG-1 / IAS-8},
      cid          = {I:(DE-Juel1)IBG-1-20101118 / I:(DE-Juel1)IAS-8-20210421},
      pnm          = {2171 - Biological and environmental resources for
                      sustainable use (POF4-217)},
      pid          = {G:(DE-HGF)POF4-2171},
      typ          = {PUB:(DE-HGF)25},
      doi          = {10.48550/ARXIV.2411.05030},
      url          = {https://juser.fz-juelich.de/record/1037178},
}
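
% For context only (not part of the record above): the Panoptic Quality (PQ)
% scores quoted in the abstract follow the standard panoptic segmentation
% metric, computed over predicted/ground-truth segment pairs matched at
% IoU > 0.5:
%
%   \[
%     \mathrm{PQ} =
%       \frac{\sum_{(p,g) \in \mathit{TP}} \mathrm{IoU}(p,g)}
%            {|\mathit{TP}| + \tfrac{1}{2}|\mathit{FP}| + \tfrac{1}{2}|\mathit{FN}|}
%   \]
%
% where TP, FP, and FN are the matched, unmatched predicted, and unmatched
% ground-truth segments, respectively.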