@inproceedings{3091eeb9280547158e794a6cb1c3e209,
title = "New Tight Relaxations of Rank Minimization for Multi-Task Learning",
abstract = "Multi-task learning has been observed by many researchers, which supposes that different tasks can share a low-rank common yet latent subspace. It means learning multiple tasks jointly is better than learning them independently. In this paper, we propose two novel multi-task learning formulations based on two regularization terms, which can learn the optimal shared latent subspace by minimizing the exactly k minimal singular values. The proposed regularization terms are the more tight approximations of rank minimization than trace norm. But it's an NP-hard problem to solve the exact rank minimization problem. Therefore, we design a novel re-weighted based iterative strategy to solve our models, which can tactically handle the exact rank minimization problem by setting a large penalizing parameter. Experimental results on benchmark datasets demonstrate that our methods can correctly recover the low-rank structure shared across tasks, and outperform related multi-task learning methods.",
keywords = "multi-task learning, rank minimization, re-weighted method, tight relaxation",
author = "Wei Chang and Feiping Nie and Rong Wang and Xuelong Li",
note = "Publisher Copyright: {\textcopyright} 2021 ACM.; 30th ACM International Conference on Information and Knowledge Management, CIKM 2021 ; Conference date: 01-11-2021 Through 05-11-2021",
year = "2021",
month = oct,
day = "26",
doi = "10.1145/3459637.3482154",
language = "英语",
series = "International Conference on Information and Knowledge Management, Proceedings",
publisher = "Association for Computing Machinery",
pages = "2910--2914",
booktitle = "CIKM 2021 - Proceedings of the 30th ACM International Conference on Information and Knowledge Management",
}