@comment{BibTeX export from arXiv}
@article{1802.08613v1,
  author        = {Nguyen, Dao},
  title         = {Accelerate iterated filtering},
  eprint        = {1802.08613v1},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ME},
  abstract      = {In simulation-based inferences for partially observed Markov process models
(POMP), the by-product of the Monte Carlo filtering is an approximation of the
log likelihood function. Recently, iterated filtering [14, 13] has originally
been introduced and it has been shown that the gradient of the log likelihood
can also be approximated. Consequently, different stochastic optimization
algorithm can be applied to estimate the parameters of the underlying models.
As accelerated gradient is an efficient approach in the optimization
literature, we show that we can accelerate iterated filtering in the same
manner and inherit that high convergence rate while relaxing the restricted
conditions of unbiased gradient approximation. We show that this novel
algorithm can be applied to both convex and non-convex log likelihood
functions. In addition, this approach has substantially outperformed most of
other previous approaches in a toy example and in a challenging scientific
problem of modeling infectious diseases.},
  year          = {2018},
  month         = feb,
  url           = {http://arxiv.org/abs/1802.08613v1},
  file          = {1802.08613v1.pdf},
}