@inproceedings{2bcdc6252b574d5181269db2da863a9b,
  title     = {Communication-Efficient Distributed {PCA} by {Riemannian} Optimization},
  abstract  = {In this paper, we study the leading eigenvector problem in a statistically distributed setting and propose a communication-efficient algorithm based on Riemannian optimization, which trades local computation for global communication. Theoretical analysis shows that the proposed algorithm linearly converges to the centralized empirical risk minimization solution regarding the number of communication rounds. When the number of data points in local machines is sufficiently large, the proposed algorithm achieves a significant reduction of communication cost over existing distributed PCA algorithms. Superior performance in terms of communication cost of the proposed algorithm is verified on real-world and synthetic datasets.},
  author    = {Huang, Long-Kai and Pan, Sinno Jialin},
  note      = {Publisher Copyright: Copyright 2020 by the author(s). Funding Information: This work is supported by NTU Singapore Nanyang Assistant Professorship (NAP) grant M4081532.020, and Singapore MOE AcRF Tier-2 grant MOE2016-T2-2-06.; 37th International Conference on Machine Learning, ICML 2020 ; Conference date: 13-07-2020 Through 18-07-2020},
  year      = {2020},
  month     = jul,
  language  = {English},
  series    = {Proceedings of Machine Learning Research},
  volume    = {119},
  publisher = {ML Research Press},
  pages     = {4465--4474},
  booktitle = {Proceedings of the 37th International Conference on Machine Learning, {ICML} 2020},
  url       = {https://proceedings.mlr.press/v119/},
}