@article{herde2022fast,
  abstract      = {Retraining deep neural networks when new data arrives is typically computationally expensive. Moreover, certain applications do not allow such costly retraining due to time or computational constraints. Fast Bayesian updates are a possible solution to this issue. Therefore, we propose a Bayesian update based on Monte Carlo samples and a last-layer Laplace approximation for different Bayesian neural network types, i.e., Dropout, Ensemble, and Spectral Normalized Neural Gaussian Process (SNGP). In a large-scale evaluation study, we show that our updates combined with SNGP represent a fast and competitive alternative to costly retraining. As a use case, we combine the Bayesian updates for SNGP with different sequential query strategies to demonstrate, by way of example, their improved selection performance in active learning.},
  archiveprefix = {arXiv},
  author        = {Herde, Marek and Huang, Zhixin and Huseljic, Denis and Kottke, Daniel and Vogt, Stephan and Sick, Bernhard},
  codeurl       = {https://github.com/ies-research/bayesian-updates},
  eid           = {arXiv:2210.06112},
  eprint        = {2210.06112},
  journal       = {arXiv e-prints},
  pages         = {arXiv:2210.06112},
  primaryclass  = {cs.LG},
  title         = {Fast Bayesian Updates for Deep Learning with a Use Case in Active Learning},
  url           = {https://arxiv.org/abs/2210.06112},
  year          = {2022}
}