@inproceedings{6d2643e2e957483a9e5d52084baa828a,
title = "Stable gradient descent",
abstract = "The goal of many machine learning tasks is to learn a model that has small population risk. While mini-batch stochastic gradient descent (SGD) and variants are popular approaches for achieving this goal, it is hard to prescribe a clear stopping criterion and to establish high probability convergence bounds to the population risk. In this paper, we introduce Stable Gradient Descent which validates stochastic gradient computations by splitting data into training and validation sets and reuses samples using a differential private mechanism. StGD comes with a natural upper bound on the number of iterations and has high-probability convergence to the population risk. Experimental results illustrate that StGD is empirically competitive and often better than SGD and GD.",
author = "Yingxue Zhou and Sheng Chen and Arindam Banerjee",
note = "Publisher Copyright: {\textcopyright} 34th Conference on Uncertainty in Artificial Intelligence 2018. All rights reserved.; 34th Conference on Uncertainty in Artificial Intelligence 2018, UAI 2018 ; Conference date: 06-08-2018 Through 10-08-2018",
year = "2018",
language = "English (US)",
series = "34th Conference on Uncertainty in Artificial Intelligence 2018, UAI 2018",
publisher = "Association For Uncertainty in Artificial Intelligence (AUAI)",
pages = "766--775",
editor = "Ricardo Silva and Amir Globerson and Amir Globerson",
booktitle = "34th Conference on Uncertainty in Artificial Intelligence 2018, UAI 2018",
}