@inproceedings{294,
  author        = {Wang, Tianyi and Chen, Shu-Ching},
  title         = {Multi-Label Multi-Task Learning with Dynamic Task Weight Balancing},
  booktitle     = {2020 {IEEE} 21st International Conference on Information Reuse and Integration for Data Science ({IRI})},
  year          = {2020},
  month         = aug,
  pages         = {245--252},
  publisher     = {IEEE},
  doi           = {10.1109/IRI49571.2020.00042},
  url           = {https://par.nsf.gov/biblio/10233987},
  abstract      = {Data collected from real-world environments often contain multiple objects, scenes, and activities. In comparison to single-label problems, where each data sample only defines one concept, multi-label problems allow the co-existence of multiple concepts. To exploit the rich semantic information in real-world data, multi-label classification has seen many applications in a variety of domains. The traditional approaches to multi-label problems tend to have the side effects of increased memory usage, slow model inference speed, and most importantly the under-utilization of the dependency across concepts. In this paper, we adopt multi-task learning to address these challenges. Multi-task learning treats the learning of each concept as a separate job, while at the same time leverages the shared representations among all tasks. We also propose a dynamic task balancing method to automatically adjust the task weight distribution by taking both sample-level and task-level learning complexities into consideration. Our framework is evaluated on a disaster video dataset and the performance is compared with several state-of-the-art multi-label and multi-task learning techniques. The results demonstrate the effectiveness and supremacy of our approach.},
  internal-note = {Cleaned auto-export: moved proceedings title from journal to booktitle; pages reconstructed from original chapter={245} (start page) and pages={8} (page count) -- verify 245--252 against the published record.},
}