@article{2509.25507v1,
Author = {Anirban Chatterjee and Sayantan Choudhury and Rohan Hore},
Title = {One-shot Conditional Sampling: MMD meets Nearest Neighbors},
Eprint = {2509.25507v1},
ArchivePrefix = {arXiv},
PrimaryClass = {stat.ML},
Abstract = {How can we generate samples from a conditional distribution that we never
fully observe? This question arises across a broad range of applications in
both modern machine learning and classical statistics, including image
post-processing in computer vision, approximate posterior sampling in
simulation-based inference, and conditional distribution modeling in complex
data settings. In such settings, compared with unconditional sampling,
additional feature information can be leveraged to enable more adaptive and
efficient sampling. Building on this, we introduce Conditional Generator using
MMD (CGMMD), a novel framework for conditional sampling. Unlike many
contemporary approaches, our method frames the training objective as a simple,
adversary-free direct minimization problem. A key feature of CGMMD is its
ability to produce conditional samples in a single forward pass of the
generator, enabling practical one-shot sampling with low test-time complexity.
We establish rigorous theoretical bounds on the loss incurred when sampling
from the CGMMD sampler, and prove convergence of the estimated distribution to
the true conditional distribution. In the process, we also develop a uniform
concentration result for nearest-neighbor based functionals, which may be of
independent interest. Finally, we show that CGMMD performs competitively on
synthetic tasks involving complex conditional densities, as well as on
practical applications such as image denoising and image super-resolution.},
Year = {2025},
Month = {Sep},
Url = {http://arxiv.org/abs/2509.25507v1},
File = {2509.25507v1.pdf}
}