BibTeX
@article{2310.01808v1,
Author = {Benjamin Kurt Miller and Marco Federici and Christoph Weniger and Patrick Forré},
Title = {Simulation-based Inference with the Generalized Kullback-Leibler
Divergence},
Eprint = {2310.01808v1},
ArchivePrefix = {arXiv},
PrimaryClass = {stat.ML},
Abstract = {In Simulation-based Inference, the goal is to solve the inverse problem when
the likelihood is only known implicitly. Neural Posterior Estimation commonly
fits a normalized density estimator as a surrogate model for the posterior.
This formulation cannot easily fit unnormalized surrogates because it optimizes
the Kullback-Leibler divergence. We propose to optimize a generalized
Kullback-Leibler divergence that accounts for the normalization constant in
unnormalized distributions. The objective recovers Neural Posterior Estimation
when the model class is normalized and unifies it with Neural Ratio Estimation,
combining both into a single objective. We investigate a hybrid model that
offers the best of both worlds by learning both a normalized base distribution
and a ratio. We also present benchmark results.},
Year = {2023},
Month = {Oct},
Url = {http://arxiv.org/abs/2310.01808v1},
File = {2310.01808v1.pdf}
}
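
A minimal sketch of the generalized Kullback-Leibler divergence referenced in the title, written with its standard definition for an unnormalized surrogate $\tilde{q}$; the paper's exact training objective and parameterization are not reproduced here and may differ:

$$
D_{\mathrm{GKL}}(p \,\|\, \tilde{q})
  = \int p(x) \log \frac{p(x)}{\tilde{q}(x)} \, dx
  - \int p(x) \, dx
  + \int \tilde{q}(x) \, dx .
$$

When both $p$ and $\tilde{q}$ integrate to one, the last two terms cancel and the expression reduces to the ordinary Kullback-Leibler divergence, consistent with the abstract's statement that the objective recovers Neural Posterior Estimation for a normalized model class.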