Score-based generative models have emerged as state-of-the-art generative models. In this paper, we introduce a novel sampling scheme that can be combined with pretrained score-based diffusion models to speed up sampling by a factor of two to five in terms of the number of function evaluations (NFEs), with a superior Fréchet Inception distance (FID), compared to annealed Langevin dynamics in the noise-conditional score network (NCSN) and the improved noise-conditional score network (NCSN++). The proposed sampling algorithm is inspired by momentum-based accelerated gradient descent used in convex optimization techniques. We validate the sampling efficiency of the proposed algorithm in terms of FID on the CIFAR-10 and CelebA datasets.
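Since the abstract does not spell out the update rule, the following is only a minimal sketch of how momentum might be combined with annealed Langevin dynamics, using a generic heavy-ball momentum term on top of the NCSN step-size schedule. The names `score_model`, `sigmas`, `beta`, and `eps` are illustrative assumptions, not the paper's actual API or hyperparameters.

```python
import torch

def momentum_annealed_langevin(score_model, sigmas, shape, n_steps=100,
                               eps=2e-5, beta=0.9, device="cpu"):
    """Illustrative momentum-augmented annealed Langevin sampler (sketch).

    Assumptions:
      - score_model(x, sigma) returns the score estimate s_theta(x, sigma),
        i.e. an approximation of grad_x log p_sigma(x), as in NCSN.
      - sigmas is a decreasing sequence of noise levels (floats).
      - The heavy-ball buffer `v` stands in for the paper's momentum
        mechanism, whose exact form is not given in the abstract.
    """
    x = torch.rand(shape, device=device)   # initialize from noise
    v = torch.zeros_like(x)                # momentum buffer
    for sigma in sigmas:
        # Per-level step size from the NCSN schedule: eps * sigma^2 / sigma_L^2
        alpha = eps * (sigma / sigmas[-1]) ** 2
        for _ in range(n_steps):
            z = torch.randn_like(x)
            grad = score_model(x, sigma)
            v = beta * v + 0.5 * alpha * grad        # accumulate momentum on the score
            x = x + v + (alpha ** 0.5) * z           # Langevin update with injected noise
    return x
```

In this sketch, setting `beta = 0` recovers the standard annealed Langevin dynamics sampler; a nonzero `beta` carries information from previous score evaluations forward, which is the general mechanism by which momentum methods reduce the number of function evaluations.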
@inproceedings{shetty2024momentum,
title={Momentum-Imbued Langevin Dynamics (MILD) for Faster Sampling},
author={Shetty, Nishanth and Bandla, Manikanta and Neema, Nishit and Asokan, Siddarth and Seelamantula, Chandra Sekhar},
booktitle={ICASSP 2024-2024 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
pages={6635--6639},
year={2024},
organization={IEEE}
}