% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@PHDTHESIS{Schuchert:7568,
      author       = {Schuchert, T.},
      title        = {{P}lant {L}eaf {M}otion {E}stimation {U}sing {A} 5{D}
                      {A}ffine {O}ptical {F}low {M}odel},
      school       = {RWTH Aachen},
      type         = {Dr. (Univ.)},
      reportid     = {PreJuSER-7568},
      year         = {2010},
      note         = {Record converted from VDB: 12.11.2012; Aachen, RWTH, Diss.,
                      2010},
      abstract     = {High accuracy motion analysis of plant leaves is of great
                      interest for plant physiology, e.g., for the estimation of
                      plant leaf orientation or of temporal and spatial growth
                      maps, which are determined by the divergence of the 3D leaf
                      motion. In this work, a new method for plant leaf motion
                      estimation is presented.
                      The model is based on 5D affine optical flow, which allows
                      simultaneous estimation of 3D structure, surface normals
                      and 3D motion of objects using multi-camera data. The
                      method consists of several consecutive estimation
                      procedures. In a first step, the affine transformation in a
                      5D data set, i.e., 3D image sequences (x,y,t) acquired by a
                      2D camera grid (sx,sy), is estimated within a differential
                      framework. In this work, the differential framework, based
                      on an optical flow model, is extended by explicitly
                      modeling illumination changes. A
                      second estimation process yields 3D structure and 3D motion
                      parameters from the affine optical flow parameters. Modeling
                      the 3D scene with local surface patches allows deriving a
                      matrix defining the projection of 3D structure and 3D motion
                      onto each camera sensor. The inverse projection matrix is
                      used to estimate 3D structure (depth and surface normals)
                      and 3D motion, including translation, rotation and
                      acceleration, from up to 24 affine optical flow parameters.
                      In order to stabilize the estimation process, optical flow
                      parameters are additionally estimated separately for each
                      camera. A least squares estimator yields the solution
                      minimizing the difference between the optical flow
                      parameters and the back-projection of the 3D scene motion
                      onto all
                      cameras. Experiments on synthetic data demonstrate improved
                      accuracy and improved robustness against illumination
                      changes compared to methods proposed in recent literature.
                      Moreover, the new method allows estimation of additional
                      parameters such as surface normals, rotation and
                      acceleration.
                      Finally, plant data acquired under typical laboratory
                      conditions are analyzed, demonstrating the applicability of
                      the method to plant physiology.},
      cin          = {ICG-3},
      cid          = {I:(DE-Juel1)ICG-3-20090406},
      pnm          = {Terrestrische Umwelt},
      pid          = {G:(DE-Juel1)FUEK407},
      typ          = {PUB:(DE-HGF)11},
      url          = {https://juser.fz-juelich.de/record/7568},
}