% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@ARTICLE{Grassberger:908873,
      author       = {Grassberger, Peter},
      title        = {{O}n {G}eneralized {S}chürmann {E}ntropy {E}stimators},
      journal      = {Entropy},
      volume       = {24},
      number       = {5},
      issn         = {1099-4300},
      address      = {Basel},
      publisher    = {MDPI},
      reportid     = {FZJ-2022-02887},
      pages        = {680},
      year         = {2022},
      abstract     = {We present a new class of estimators of Shannon entropy
                      for severely undersampled discrete distributions. It is
                      based on a generalization of an estimator proposed by T.
                      Schürmann, which itself is a generalization of an
                      estimator proposed by myself. For a special set of
                      parameters, they are completely free of bias and have a
                      finite variance, something which is widely believed to be
                      impossible. We present also detailed numerical tests,
                      where we compare them with other recent estimators and
                      with exact results, and point out a clash with Bayesian
                      estimators for mutual information.},
      cin          = {JSC},
      ddc          = {510},
      cid          = {I:(DE-Juel1)JSC-20090406},
      pnm          = {5111 - Domain-Specific Simulation $\&$ Data Life Cycle Labs
                      (SDLs) and Research Groups (POF4-511)},
      pid          = {G:(DE-HGF)POF4-5111},
      typ          = {PUB:(DE-HGF)16},
      pubmed       = {35626564},
      UT           = {WOS:000801648200001},
      doi          = {10.3390/e24050680},
      url          = {https://juser.fz-juelich.de/record/908873},
}
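
% Context for this entry (not taken from the paper itself): the abstract above
% concerns bias in Shannon entropy estimation from severely undersampled
% discrete data. The sketch below, kept as a comment so this file remains a
% valid BibTeX database, illustrates the baseline problem with the naive
% plug-in estimator and a simple classical correction (Miller-Madow). It is a
% stand-in for intuition only, not the Grassberger/Schürmann estimators that
% the cited paper generalizes.
%
%     import math
%     from collections import Counter
%
%     def plugin_entropy(samples):
%         """Naive plug-in Shannon entropy estimate in nats (biased downward
%         when the sample size is comparable to the number of outcomes)."""
%         n = len(samples)
%         counts = Counter(samples)
%         return -sum((c / n) * math.log(c / n) for c in counts.values())
%
%     def miller_madow_entropy(samples):
%         """Plug-in estimate plus the first-order Miller-Madow bias
%         correction (K_observed - 1) / (2 N)."""
%         n = len(samples)
%         k_observed = len(set(samples))
%         return plugin_entropy(samples) + (k_observed - 1) / (2 * n)
%
% Example: miller_madow_entropy("aababcbacdd") returns a slightly larger (less
% negatively biased) estimate than plugin_entropy on the same sample.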