% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

% NOTE(review): arXiv preprint. Added eprint/archiveprefix/primaryclass so
% arXiv-aware styles can render the e-print identifier (previously only
% encoded in the DOI). Replaced per-letter brace protection in the title
% ({G}ain {C}ell-...) with whole-word braces: single-letter braces break
% kerning and hyphenation; the rendered casing is unchanged.
@article{Manea:1038062,
      author       = {Manea, Paul-Philipp and Leroux, Nathan and Neftci, Emre and
                      Strachan, John Paul},
      title        = {{Gain} {Cell}-{Based} {Analog} {Content} {Addressable}
                      {Memory} for {Dynamic} {Associative} tasks in {AI}},
      publisher    = {arXiv},
      eprint       = {2410.09755},
      archiveprefix = {arXiv},
      primaryclass = {cs.ET},
      reportid     = {FZJ-2025-01111},
      year         = {2024},
      abstract     = {Analog Content Addressable Memories (aCAMs) have proven
                      useful for associative in-memory computing applications like
                      Decision Trees, Finite State Machines, and Hyper-dimensional
                      Computing. While non-volatile implementations using FeFETs
                      and ReRAM devices offer speed, power, and area advantages,
                      they suffer from slow write speeds and limited write cycles,
                      making them less suitable for computations involving fully
                      dynamic data patterns. To address these limitations, in this
                      work, we propose a capacitor gain cell-based aCAM designed
                      for dynamic processing, where frequent memory updates are
                      required. Our system compares analog input voltages to
                      boundaries stored in capacitors, enabling efficient dynamic
                      tasks. We demonstrate the application of aCAM within
                      transformer attention mechanisms by replacing the
                      softmax-scaled dot-product similarity with aCAM similarity,
                      achieving competitive results. Circuit simulations on a TSMC
                      28 nm node show promising performance in terms of energy
                      efficiency, precision, and latency, making it well-suited
                      for fast, dynamic AI applications.},
      keywords     = {Emerging Technologies (cs.ET) (Other) / FOS: Computer and
                      information sciences (Other)},
      cin          = {PGI-14 / PGI-15},
      cid          = {I:(DE-Juel1)PGI-14-20210412 / I:(DE-Juel1)PGI-15-20210701},
      pnm          = {5234 - Emerging NC Architectures (POF4-523) / BMBF 16ME0400
                      - Verbundprojekt: Neuro-inspirierte Technologien der
                      künstlichen Intelligenz für die Elektronik der Zukunft -
                      NEUROTEC II - (16ME0400)},
      pid          = {G:(DE-HGF)POF4-5234 / G:(BMBF)16ME0400},
      typ          = {PUB:(DE-HGF)25},
      doi          = {10.48550/arXiv.2410.09755},
      url          = {https://juser.fz-juelich.de/record/1038062},
}