% IMPORTANT: The following is UTF-8 encoded.  This means that in the presence
% of non-ASCII characters, it will not work with BibTeX 0.99 or older.
% Instead, you should use an up-to-date BibTeX implementation like “bibtex8” or
% “biber”.

@ARTICLE{Drnevich:642844,
      author       = {Drnevich, Matthew and Jiggins, Stephen and Katzy, Judith
                      and Cranmer, Kyle},
      title        = {{N}eural quasiprobabilistic likelihood ratio estimation
                      with negatively weighted data},
      journal      = {Machine Learning: Science and Technology},
      volume       = {6},
      number       = {4},
      issn         = {2632-2153},
      address      = {Bristol},
      publisher    = {IOP Publishing},
      reportid     = {PUBDB-2025-05650},
      pages        = {045023},
      year         = {2025},
      abstract     = {Motivated by real-world situations found in
                      high energy particle physics, we consider a generalization
                      of the likelihood-ratio estimation task to a
                      quasiprobabilistic setting where probability densities can
                      be negative, and to importance sampling where the importance
                      weights can be negative. The presence of negative densities
                      and negative weights poses an array of challenges to
                      traditional neural likelihood ratio (LR) estimation methods.
                      We address these challenges by introducing a novel loss
                      function. In addition, we introduce a new model architecture
                      based on the decomposition of a LR using signed mixture
                      models, providing a second strategy for overcoming these
                      challenges. Finally, we demonstrate our approach on a
                      pedagogical example and a real-world example from particle
                      physics.},
      cin          = {ATLAS},
      ddc          = {621.3},
      cid          = {I:(DE-H253)ATLAS-20120731},
      pnm          = {611 - Fundamental Particles and Forces (POF4-611)},
      pid          = {G:(DE-HGF)POF4-611},
      experiment   = {EXP:(DE-H253)LHC-Exp-ATLAS-20150101},
      typ          = {PUB:(DE-HGF)16},
      doi          = {10.1088/2632-2153/ae0def},
      url          = {https://bib-pubdb1.desy.de/record/642844},
}