% IMPORTANT: The following is UTF-8 encoded. This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.
@ARTICLE{Eitrich:48496,
author = {Eitrich, T. and Lang, B.},
title = {{E}fficient {O}ptimization of {S}upport {V}ector {M}achine
{L}earning {P}arameters for {U}nbalanced {D}ata {S}ets},
journal = {Journal of Computational and Applied Mathematics},
volume = {196},
issn = {0377-0427},
address = {Amsterdam [et al.]},
publisher = {North-Holland},
reportid = {PreJuSER-48496},
pages = {425--436},
year = {2006},
note = {Record converted from VDB: 12.11.2012},
abstract = {Support vector machines are powerful kernel methods for
classification and regression tasks. If trained optimally,
they produce excellent separating hyperplanes. The quality
of the training, however, depends not only on the given
training data but also on additional learning parameters,
which are difficult to adjust, in particular for unbalanced
datasets. Traditionally, grid search techniques have been
used for determining suitable values for these parameters.
In this paper, we propose an automated approach to adjusting
the learning parameters using a derivative-free numerical
optimizer. To make the optimization process more efficient,
a new sensitive quality measure is introduced. Numerical
tests with a well-known dataset show that our approach can
produce support vector machines that are very well tuned to
their classification tasks. (c) 2005 Elsevier B.V. All
rights reserved.},
keywords = {J (WoSType)},
cin = {ZAM},
ddc = {510},
cid = {I:(DE-Juel1)VDB62},
pnm = {Scientific Computing},
pid = {G:(DE-Juel1)FUEK411},
shelfmark = {Mathematics, Applied},
typ = {PUB:(DE-HGF)16},
UT = {WOS:000239746800007},
doi = {10.1016/j.cam.2005.09.009},
url = {https://juser.fz-juelich.de/record/48496},
}
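
% The abstract above describes replacing a grid search with a derivative-free
% numerical optimizer that tunes the SVM learning parameters against a
% sensitivity-aware quality measure. The sketch below only illustrates that
% idea; it is not the authors' code. It assumes scikit-learn and SciPy, uses
% Nelder-Mead as the derivative-free method, and takes the geometric mean of
% sensitivity and specificity as a stand-in for the paper's quality measure.
% The parameter set (C, gamma, positive-class weight), starting values, and
% the toy data set are illustrative assumptions.

import numpy as np
from scipy.optimize import minimize
from sklearn.datasets import make_classification
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import cross_val_predict
from sklearn.svm import SVC

# Unbalanced toy problem (roughly 9:1 class ratio).
X, y = make_classification(n_samples=600, n_features=20,
                           weights=[0.9, 0.1], random_state=0)

def negative_quality(log_params):
    """Objective for the derivative-free optimizer: the negative geometric
    mean of sensitivity and specificity, estimated by 5-fold cross-validation.
    Parameters are optimized on a log scale so they stay positive."""
    C, gamma, w_pos = np.exp(log_params)
    clf = SVC(C=C, gamma=gamma, class_weight={0: 1.0, 1: w_pos})
    y_pred = cross_val_predict(clf, X, y, cv=5)
    tn, fp, fn, tp = confusion_matrix(y, y_pred).ravel()
    sensitivity = tp / (tp + fn) if (tp + fn) else 0.0
    specificity = tn / (tn + fp) if (tn + fp) else 0.0
    return -np.sqrt(sensitivity * specificity)

# Nelder-Mead needs no derivatives; start from C=1, gamma=0.1, weight=5.
result = minimize(negative_quality, x0=np.log([1.0, 0.1, 5.0]),
                  method="Nelder-Mead")
C_opt, gamma_opt, w_opt = np.exp(result.x)
print(f"C={C_opt:.3g}  gamma={gamma_opt:.3g}  "
      f"positive-class weight={w_opt:.3g}  quality={-result.fun:.3f}")

% In contrast to a grid search, which evaluates the quality measure on a fixed
% lattice of parameter values, the optimizer above adapts its sample points to
% the observed quality, which is the efficiency argument made in the abstract.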