% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{Schuchert:1797,
author = {Schuchert, T. and Aach, T. and Scharr, H.},
title        = {{Range Flow in Varying Illumination: Algorithms and Comparisons}},
journal      = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
volume = {32},
issn = {0162-8828},
address = {New York, NY},
publisher = {IEEE},
pages        = {1646--1658},
year = {2010},
abstract = {We extend estimation of range flow to handle brightness
changes in image data caused by inhomogeneous illumination.
Standard range flow computes 3D velocity fields using both
range and intensity image sequences. Toward this end, range
flow estimation combines a depth change model with a
brightness constancy model. However, local brightness is
generally not preserved when object surfaces rotate relative
to the camera or the light sources, or when surfaces move in
inhomogeneous illumination. We describe and investigate
different approaches to handle such brightness changes. A
straightforward approach is to prefilter the intensity data
such that brightness changes are suppressed, for instance,
by a highpass or a homomorphic filter. Such prefiltering
may, though, reduce the signal-to-noise ratio. An
                alternative, novel approach is to replace the brightness
                constancy model by 1) a gradient constancy model, 2) a
                combination of gradient and brightness constancy constraints
                used successfully earlier for optical flow, or 3) a
                physics-based brightness change model. In performance tests,
the standard version and the novel versions of range flow
estimation are investigated using prefiltered or
nonprefiltered synthetic data with available ground truth.
Furthermore, the influences of additive Gaussian noise and
simulated shot noise are investigated. Finally, we compare
all range flow estimators on real data.},
keywords = {Algorithms / Artifacts / Artificial Intelligence / Image
Enhancement: methods / Image Interpretation,
Computer-Assisted: methods / Imaging, Three-Dimensional:
methods / Lighting: methods / Pattern Recognition,
Automated: methods / J (WoSType)},
cin = {ICG-3 / JARA-BRAIN},
ddc = {620},
cid          = {I:(DE-Juel1)ICG-3-20090406 / I:(DE-82)080010\_20140620},
pnm = {Terrestrische Umwelt},
pid = {G:(DE-Juel1)FUEK407},
shelfmark    = {Computer Science, Artificial Intelligence / Engineering,
                Electrical \& Electronic},
typ = {PUB:(DE-HGF)16},
pubmed = {pmid:20634558},
UT = {WOS:000279969000008},
doi = {10.1109/TPAMI.2009.162},
url = {https://juser.fz-juelich.de/record/1797},
}
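% The abstract above refers to the standard range flow constraint and the
% brightness constancy constraint without stating them. As a sketch in assumed
% notation (not quoted from the paper; sign conventions vary), with range data
% Z(x, y, t), intensity I(x, y, t), image-plane flow (u, v), and 3D velocity
% (U, V, W), these constraints are commonly written as
%
%   Z_x U + Z_y V - W + Z_t = 0    % depth change / range flow constraint
%   I_x u + I_y v + I_t     = 0    % brightness constancy constraint
%
% The gradient constancy alternative mentioned in the abstract instead requires
% the spatial intensity gradient \nabla I to be preserved along the motion,
% i.e. d(\nabla I)/dt = 0; the combined variant, as used earlier for optical
% flow, typically weights the brightness and gradient constancy terms together.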