Preprints:
2023
arXiv
Variance reduction techniques for stochastic proximal point algorithms
Cheik Traoré, Vassilis Apidopoulos, Saverio Salzo, and Silvia Villa
arXiv e-prints, August 2023
@article{traore-arXiv230809310T,author={{Traor{\'e}}, Cheik and {Apidopoulos}, Vassilis and {Salzo}, Saverio and {Villa}, Silvia},title={{Variance reduction techniques for stochastic proximal point algorithms}},journal={arXiv e-prints},keywords={Mathematics - Optimization and Control, Computer Science - Machine Learning},year={2023},month=aug,eid={arXiv:2308.09310},pages={arXiv:2308.09310},doi={10.48550/arXiv.2308.09310},archiveprefix={arXiv},eprint={2308.09310},primaryclass={math.OC},}
Peer-reviewed papers:
2023
COAP
Convergence of an Asynchronous Block-Coordinate Forward-Backward Algorithm for Convex Composite Optimization
Cheik Traoré, Saverio Salzo, and Silvia Villa
Computational Optimization and Applications, May 2023
In this paper, we study the convergence properties of a randomized block-coordinate descent algorithm for the minimization of a composite convex objective function, where the block-coordinates are updated asynchronously and randomly according to an arbitrary probability distribution. We prove that the iterates generated by the algorithm form a stochastic quasi-Fejér sequence and thus converge almost surely to a minimizer of the objective function. Moreover, we prove a general sublinear rate of convergence in expectation for the function values and a linear rate of convergence in expectation under an error bound condition of Tseng type. Under the same condition, strong convergence of the iterates is established, together with their linear convergence rate.
@article{traore-s10589-023-00489-w,author={Traor\'{e}, Cheik and Salzo, Saverio and Villa, Silvia},title={Convergence of an Asynchronous Block-Coordinate Forward-Backward Algorithm for Convex Composite Optimization},year={2023},volume={86},number={1},url={https://doi.org/10.1007/s10589-023-00489-w},doi={10.1007/s10589-023-00489-w},journal={Computational Optimization and Applications},month=may,pages={303–344},numpages={42},keywords={Convergence rates, Convex optimization, 90C25, Randomized block-coordinate descent, Forward-backward algorithm, Stochastic quasi-Fej\'{e}r sequences, 65K05, Asynchronous algorithms, 90C06, Error bounds, 49M27},}
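The abstract above refers to a randomized block-coordinate forward-backward scheme for composite problems of the form f + g with f smooth and g separable. The following is a minimal synchronous sketch in Python/NumPy, not the paper's asynchronous implementation: the names soft_threshold and block_coordinate_forward_backward, the LASSO-type example, the two-block partition, and the step-size choice are illustrative assumptions.

```python
import numpy as np

def soft_threshold(v, t):
    """Proximal operator of t * ||.||_1 (a common separable choice of g)."""
    return np.sign(v) * np.maximum(np.abs(v) - t, 0.0)

def block_coordinate_forward_backward(grad_f, x0, blocks, step, lam,
                                      n_iter=1000, probs=None, rng=None):
    """Randomized block-coordinate forward-backward iteration for
    min_x f(x) + lam * ||x||_1, with f smooth and grad_f its gradient.

    At each iteration one block of coordinates is sampled according to
    `probs` and updated with a forward (gradient) step followed by a
    backward (proximal) step; all other coordinates stay unchanged.
    """
    rng = np.random.default_rng() if rng is None else rng
    x = x0.copy()
    for _ in range(n_iter):
        i = rng.choice(len(blocks), p=probs)   # sample a block at random
        b = blocks[i]
        g = grad_f(x)[b]                       # partial gradient on that block
        x[b] = soft_threshold(x[b] - step * g, step * lam)
    return x

# Illustrative LASSO-type instance: f(x) = 0.5 * ||A x - y||^2, g = lam * ||.||_1
A = np.random.default_rng(0).standard_normal((50, 20))
y = A @ np.ones(20)
grad_f = lambda x: A.T @ (A @ x - y)
blocks = [np.arange(0, 10), np.arange(10, 20)]   # two coordinate blocks
step = 1.0 / np.linalg.norm(A, 2) ** 2           # 1 / Lipschitz constant of grad f
x_hat = block_coordinate_forward_backward(grad_f, np.zeros(20), blocks, step, lam=0.1)
```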
2021
ORL
Sequential convergence of AdaGrad algorithm for smooth convex optimization
Cheik Traoré and Edouard Pauwels
Operations Research Letters, July 2021
We prove that the iterates produced by either the scalar step-size variant or the coordinatewise variant of the AdaGrad algorithm are convergent sequences when applied to convex objective functions with Lipschitz gradient. The key insight is that such AdaGrad sequences satisfy a variable-metric quasi-Fejér monotonicity property, which allows us to prove convergence.
@article{traore-j.orl.2021.04.011.,title={Sequential convergence of AdaGrad algorithm for smooth convex optimization},author={Traor{\'e}, Cheik and Pauwels, Edouard},url={https://doi.org/10.1016/j.orl.2021.04.011},journal={Operations Research Letters},publisher={Elsevier},volume={49},number={4},pages={452-458},year={2021},month=jul,doi={10.1016/j.orl.2021.04.011},}
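The abstract above concerns the two AdaGrad variants whose iterates are shown to converge. Below is a minimal Python/NumPy sketch of both updates on a smooth convex quadratic; the function name adagrad, the parameters eta and eps, and the toy quadratic example are assumptions made for illustration, not the paper's experimental setup.

```python
import numpy as np

def adagrad(grad, x0, eta=1.0, eps=1e-8, n_iter=1000, coordinatewise=True):
    """AdaGrad iterates for min_x f(x), f convex with Lipschitz gradient.

    coordinatewise=True : each coordinate keeps its own accumulated squared
                          gradients (coordinatewise / diagonal variant).
    coordinatewise=False: a single scalar accumulator of squared gradient
                          norms drives a common step size (scalar variant).
    """
    x = x0.astype(float).copy()
    acc = np.zeros_like(x) if coordinatewise else 0.0
    iterates = [x.copy()]
    for _ in range(n_iter):
        g = grad(x)
        if coordinatewise:
            acc += g ** 2                    # per-coordinate accumulation
        else:
            acc += np.dot(g, g)              # scalar accumulation of ||g||^2
        x = x - eta * g / (np.sqrt(acc) + eps)
        iterates.append(x.copy())
    return iterates

# Illustrative smooth convex objective f(x) = 0.5 * x^T Q x
Q = np.diag([1.0, 10.0])
grad = lambda x: Q @ x
seq = adagrad(grad, np.array([5.0, -3.0]), eta=1.0, n_iter=2000)
# The paper's result concerns convergence of the sequence `seq` itself,
# not only of the function values along it.
```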