% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@article{Pedretti:904613,
      author       = {Pedretti, Giacomo and Graves, Catherine E. and Serebryakov,
                      Sergey and Mao, Ruibin and Sheng, Xia and Foltin, Martin and
                      Li, Can and Strachan, John Paul},
      title        = {Tree-Based Machine Learning Performed In-Memory with
                      Memristive Analog {CAM}},
      journal      = {Nature Communications},
      volume       = {12},
      number       = {1},
      issn         = {2041-1723},
      address      = {London},
      publisher    = {Nature Publishing Group UK},
      reportid     = {FZJ-2021-06183},
      pages        = {5806},
      year         = {2021},
      abstract     = {Tree-based machine learning techniques, such as Decision
                      Trees and Random Forests, are top performers in several
                      domains as they do well with limited training datasets and
                      offer improved interpretability compared to Deep Neural
                      Networks (DNN). However, these models are difficult to
                      optimize for fast inference at scale without accuracy loss
                      in von Neumann architectures due to non-uniform memory
                      access patterns. Recently, we proposed a novel analog
                      content addressable memory (CAM) based on emerging memristor
                      devices for fast look-up table operations. Here, we propose
                      for the first time to use the analog CAM as an in-memory
                      computational primitive to accelerate tree-based model
                      inference. We demonstrate an efficient mapping algorithm
                      leveraging the new analog CAM capabilities such that each
                      root to leaf path of a Decision Tree is programmed into a
                      row. This new in-memory compute concept enables few-cycle
                      model inference, dramatically increasing $10^3\times$ the
                      throughput over conventional approaches.},
      cin          = {PGI-14},
      ddc          = {500},
      cid          = {I:(DE-Juel1)PGI-14-20210412},
      pnm          = {5234 - Emerging NC Architectures (POF4-523)},
      pid          = {G:(DE-HGF)POF4-5234},
      typ          = {PUB:(DE-HGF)16},
      pubmed       = {pmid:34608133},
      UT           = {WOS:000703617100028},
      doi          = {10.1038/s41467-021-25873-0},
      url          = {https://juser.fz-juelich.de/record/904613},
}