@article{AAM-38-385,
  author   = {Cai, Jian-Feng and Liu, Haixia and Wang, Yang},
  title    = {Gradient Descent for Symmetric Tensor Decomposition},
  journal  = {Annals of Applied Mathematics},
  year     = {2022},
  volume   = {38},
  number   = {4},
  pages    = {385--413},
  abstract = {Symmetric tensor decomposition is of great importance in applications. Several studies have employed a greedy approach, where the main idea is to first find a best rank-one approximation of a given tensor, and then repeat the process to the residual tensor by subtracting the rank-one component. In this paper, we focus on finding a best rank-one approximation of a given orthogonally order-3 symmetric tensor. We give a geometric landscape analysis of a nonconvex optimization for the best rank-one approximation of orthogonally symmetric tensors. We show that any local minimizer must be a factor in this orthogonally symmetric tensor decomposition, and any other critical points are linear combinations of the factors. Then, we propose a gradient descent algorithm with a carefully designed initialization to solve this nonconvex optimization problem, and we prove that the algorithm converges to the global minimum with high probability for orthogonal decomposable tensors. This result, combined with the landscape analysis, reveals that the greedy algorithm will get the tensor CP low-rank decomposition. Numerical results are provided to verify our theoretical results.},
  doi      = {10.4208/aam.OA-2021-0090},
  url      = {http://global-sci.org/intro/article_detail/aam/21164.html},
}