BibTeX
@article{2410.15320v2,
Author = {Paul E. Chang and Nasrulloh Loka and Daolang Huang and Ulpu Remes and Samuel Kaski and Luigi Acerbi},
Title = {Amortized Probabilistic Conditioning for Optimization, Simulation and Inference},
Eprint = {2410.15320v2},
ArchivePrefix = {arXiv},
PrimaryClass = {stat.ML},
Abstract = {Amortized meta-learning methods based on pre-training have propelled fields
like natural language processing and vision. Transformer-based neural processes
and their variants are leading models for probabilistic meta-learning with a
tractable objective. Often trained on synthetic data, these models implicitly
capture essential latent information in the data-generation process. However,
existing methods do not allow users to flexibly inject (condition on) and
extract (predict) this probabilistic latent information at runtime, which is
key to many tasks. We introduce the Amortized Conditioning Engine (ACE), a new
transformer-based meta-learning model that explicitly represents latent
variables of interest. ACE affords conditioning on both observed data and
interpretable latent variables, the inclusion of priors at runtime, and outputs
predictive distributions for discrete and continuous data and latents. We show
ACE's modeling flexibility and performance in diverse tasks such as image
completion and classification, Bayesian optimization, and simulation-based
inference.},
Year = {2024},
Month = {Oct},
Url = {http://arxiv.org/abs/2410.15320v2},
File = {2410.15320v2.pdf}
}