@inproceedings{e4435a232f4d43219e90a2d3bdfd6ca7,
title = "Generalizing Nesterov{\textquoteright}s Acceleration Framework by Embedding Momentum Into Estimating Sequences: New Algorithm and Bounds",
abstract = "We present a new type of heavy-ball momentum term, which is used to construct a class of generalized estimating sequences. These sequences allow for accelerating the minimization process by exploiting the information accumulated in the previous iterates. Combining the newly introduced momentum term with the estimating sequences framework, we devise, as an example, a new black-box accelerated first-order method for solving smooth unconstrained optimization problems. We prove that the proposed method improves upon the convergence rate of the celebrated fast gradient method by at least a factor of 1/√2, and establish that the lower bound on the number of iterations carried out until convergence is O(√(κ/2)). Finally, the practical performance benefits of the proposed method are demonstrated by numerical experiments.",
keywords = "Gradient methods, Minimization, Optimization, Information theory, Convergence, estimating sequences, black-box methods, complexity analysis",
author = "Dosti, E. and Vorobyov, {S. A.} and Charalambous, T.",
year = "2022",
doi = "10.1109/ISIT50566.2022.9834684",
language = "English",
series = "IEEE International Symposium on Information Theory",
publisher = "IEEE",
pages = "1506--1511",
booktitle = "2022 IEEE International Symposium on Information Theory (ISIT)",
address = "United States",
note = "IEEE International Symposium on Information Theory, ISIT; Conference date: 26-06-2022 through 01-07-2022",
}