BibTeX
@article{2310.11122v6,
  Author        = {Lasse Elsemüller and Hans Olischläger and Marvin Schmitt and Paul-Christian Bürkner and Ullrich Köthe and Stefan T. Radev},
  Title         = {Sensitivity-Aware Amortized Bayesian Inference},
  Eprint        = {2310.11122v6},
  ArchivePrefix = {arXiv},
  PrimaryClass  = {stat.ML},
  Abstract      = {Sensitivity analyses reveal the influence of various modeling choices on the
                   outcomes of statistical analyses. While theoretically appealing, they are
                   overwhelmingly inefficient for complex Bayesian models. In this work, we
                   propose sensitivity-aware amortized Bayesian inference (SA-ABI), a multifaceted
                   approach to efficiently integrate sensitivity analyses into simulation-based
                   inference with neural networks. First, we utilize weight sharing to encode the
                   structural similarities between alternative likelihood and prior specifications
                   in the training process with minimal computational overhead. Second, we
                   leverage the rapid inference of neural networks to assess sensitivity to data
                   perturbations and preprocessing steps. In contrast to most other Bayesian
                   approaches, both steps circumvent the costly bottleneck of refitting the model
                   for each choice of likelihood, prior, or data set. Finally, we propose to use
                   deep ensembles to detect sensitivity arising from unreliable approximation
                   (e.g., due to model misspecification). We demonstrate the effectiveness of our
                   method in applied modeling problems, ranging from disease outbreak dynamics and
                   global warming thresholds to human decision-making. Our results support
                   sensitivity-aware inference as a default choice for amortized Bayesian
                   workflows, automatically providing modelers with insights into otherwise hidden
                   dimensions.},
  Year          = {2023},
  Month         = {Oct},
  Note          = {Transactions on Machine Learning Research (08/2024)},
  Url           = {http://arxiv.org/abs/2310.11122v6}
}