% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@ARTICLE{Kraskov:42907,
      author       = {Kraskov, A. and Stögbauer, H. and Grassberger, P.},
      title        = {Estimating mutual information},
      journal      = {Physical Review E},
      volume       = {69},
      number       = {6},
      issn         = {1539-3755},
      address      = {College Park, Md.},
      publisher    = {APS},
      reportid     = {PreJuSER-42907},
      pages        = {066138},
      year         = {2004},
      abstract     = {We present two classes of improved estimators for mutual
                      information $M(X,Y)$, from samples of random points
                      distributed according to some joint probability density
                      $\mu(x,y)$. In contrast to conventional estimators based
                      on binnings, they are based on entropy estimates from
                      $k$-nearest neighbor distances. This means that they are
                      data efficient (with $k=1$ we resolve structures down to
                      the smallest possible scales), adaptive (the resolution
                      is higher where data are more numerous), and have
                      minimal bias. Indeed, the bias of the underlying entropy
                      estimates is mainly due to nonuniformity of the density
                      at the smallest resolved scale, giving typically
                      systematic errors which scale as functions of $k/N$ for
                      $N$ points. Numerically, we find that both families
                      become exact for independent distributions, i.e., the
                      estimator $\hat{M}(X,Y)$ vanishes (up to statistical
                      fluctuations) if $\mu(x,y)=\mu(x)\mu(y)$. This holds for
                      all tested marginal distributions and for all dimensions
                      of $x$ and $y$. In addition, we give estimators for
                      redundancies between more than two random variables. We
                      compare our algorithms in detail with existing
                      algorithms. Finally, we demonstrate the usefulness of
                      our estimators for assessing the actual independence of
                      components obtained from independent component analysis
                      (ICA), for improving ICA, and for estimating the
                      reliability of blind source separation.},
      keywords     = {J (WoSType)},
      cin          = {NIC},
      ddc          = {530},
      cid          = {I:(DE-Juel1)NIC-20090406},
      pnm          = {Betrieb und Weiterentwicklung des Höchstleistungsrechners},
      pid          = {G:(DE-Juel1)FUEK254},
      shelfmark    = {Physics, Fluids \& Plasmas / Physics, Mathematical},
      typ          = {PUB:(DE-HGF)16},
      UT           = {WOS:000222502800050},
      pubmed       = {pmid:15244698},
      doi          = {10.1103/PhysRevE.69.066138},
      url          = {https://juser.fz-juelich.de/record/42907},
}
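
% The abstract above describes the k-nearest-neighbor mutual information
% estimator now commonly known as the KSG estimator. As a reading aid, here is
% a minimal Python sketch of the paper's first variant, Eq. (8):
% I(X,Y) = psi(k) + psi(N) - <psi(n_x+1) + psi(n_y+1)>, where n_x(i) and
% n_y(i) count marginal neighbors strictly closer than the joint k-th-neighbor
% distance, all distances taken in the max-norm. The function name, the use of
% scipy's cKDTree, and the small multiplicative offset used to enforce the
% strict inequality are illustrative choices, not taken from the paper.
%
%   import numpy as np
%   from scipy.special import digamma
%   from scipy.spatial import cKDTree
%
%   def ksg_mutual_information(x, y, k=3):
%       """Estimate I(X;Y) in nats from paired samples (KSG, first variant)."""
%       x = x.reshape(len(x), -1)   # (N, d_x)
%       y = y.reshape(len(y), -1)   # (N, d_y)
%       n = len(x)
%       xy = np.hstack((x, y))      # joint space, max-norm metric
%       # eps(i): max-norm distance to the k-th neighbor in the joint space,
%       # i.e. epsilon(i)/2 in the paper's notation (k+1 because the query
%       # returns the query point itself at distance 0).
%       eps = cKDTree(xy).query(xy, k=k + 1, p=np.inf)[0][:, -1]
%       tree_x, tree_y = cKDTree(x), cKDTree(y)
%       # n_x(i), n_y(i): marginal neighbors strictly within eps(i),
%       # excluding the point itself.
%       nx = np.array([len(tree_x.query_ball_point(x[i], eps[i] * (1 - 1e-10),
%                                                  p=np.inf)) - 1
%                      for i in range(n)])
%       ny = np.array([len(tree_y.query_ball_point(y[i], eps[i] * (1 - 1e-10),
%                                                  p=np.inf)) - 1
%                      for i in range(n)])
%       # Eq. (8) of Kraskov et al. (2004).
%       return digamma(k) + digamma(n) - np.mean(digamma(nx + 1)
%                                                + digamma(ny + 1))
%
% Quick check against a bivariate Gaussian with correlation rho = 1/sqrt(2),
% whose exact mutual information is -0.5*ln(1 - rho^2) ~ 0.347 nats:
%
%   rng = np.random.default_rng(0)
%   x = rng.normal(size=(5000, 1))
%   y = x + rng.normal(size=(5000, 1))
%   print(ksg_mutual_information(x, y, k=3))   # should be close to 0.347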