@inproceedings{huang2023active,
  abstract  = {Active learning strategies aim to minimize the number of queried samples for model training. However, experiments reveal two challenges in pool-based deep active learning on imbalanced datasets: (1) the declining performance of active learning strategies due to imbalanced class distributions; (2)~the lack of sample diversity in acquisition batches due to the absence of timely model updates. This paper proposes AL-FaMoUS, a general solution that combines fast model updates and class-balanced minibatch selection in the active learning process. Furthermore, a simplification of AL-FaMoUS, which selects a single sample in each acquisition minibatch, is evaluated experimentally on four image and three time-series imbalanced datasets. The results demonstrate that the implemented AL-FaMoUS outperforms the other adopted AL strategies, including uncertainty sampling and BALD combined solely with either the fast model update or the class-balanced selection strategy, in terms of accuracy and Macro F1 score.},
  author    = {Huang, Zhixin and He, Yujiang and Herde, Marek and Huseljic, Denis and Sick, Bernhard},
  booktitle = {Workshop on Interactive Adaptive Learning (IAL), ECML PKDD},
  pages     = {28--45},
  title     = {Active Learning with Fast Model Updates and Class-Balanced Selection for Imbalanced Datasets},
  url       = {https://ceur-ws.org/Vol-3470/paper5.pdf},
  year      = {2023}
}