@inproceedings{8a2af493866a43ec863043ce917ba515,
  title     = {A Stochastic Compositional Gradient Method Using {Markov} Samples},
  abstract  = {Consider the convex optimization problem {$\min_x f(g(x))$} where both f and g are unknown but can be estimated through sampling. We consider the stochastic compositional gradient descent method (SCGD) that updates based on random function and subgradient evaluations, which are generated by a conditional sampling oracle. We focus on the case where samples are corrupted with {Markov} noise. Under certain diminishing stepsize assumptions, we prove that the iterate of SCGD converges almost surely to an optimal solution if such a solution exists. Under specific constant stepsize assumptions, we obtain finite-sample error bounds for the averaged iterates of the algorithm. We illustrate an application to online value evaluation in dynamic programming.},
  author    = {Wang, Mengdi and Liu, Ji},
  year      = {2016},
  month     = jul,
  day       = {2},
  doi       = {10.1109/WSC.2016.7822134},
  language  = {English (US)},
  series    = {Proceedings - Winter Simulation Conference},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {702--713},
  editor    = {Roeder, Theresa M. and Frazier, Peter I. and Szechtman, Robert and Zhou, Enlu},
  booktitle = {2016 Winter Simulation Conference},
  note      = {2016 Winter Simulation Conference, WSC 2016 ; Conference date: 11-12-2016 Through 14-12-2016},
}