@inproceedings{b16692ae03174d7f8eee3e9b2a02fdc7,
title = "EE-Grad: Exploration and Exploitation for Cost-Efficient Mini-Batch SGD",
abstract = "We present a generic framework for mitigating the tradeoff between fidelity and cost in computing stochastic gradients when the costs of acquiring stochastic gradients of different quality are not known a priori. We consider a mini-batch oracle that distributes a limited query budget over a number of stochastic gradients and aggregates them to estimate the true gradient. Since the optimal mini-batch size depends on the unknown cost-fidelity function, we propose an algorithm, EE-Grad, that sequentially explores the performance of mini-batch oracles and exploits the accumulated knowledge to estimate the one achieving the best performance in terms of cost-efficiency. We provide performance guarantees for EE-Grad with respect to the optimal mini-batch oracle, and illustrate these results in the case of strongly convex objectives. We also provide a simple numerical example that corroborates our theoretical findings.",
keywords = "cost-performance tradeoff, exploration-exploitation, mini-batch, stochastic gradient descent",
author = "Donmez, {Mehmet A.} and Jeff Ludwig and Maxim Raginsky and Singer, {Andrew C.}",
note = "Publisher Copyright: {\textcopyright} 2021 IEEE.; 55th Asilomar Conference on Signals, Systems and Computers, ACSSC 2021 ; Conference date: 31-10-2021 Through 03-11-2021",
year = "2021",
doi = "10.1109/IEEECONF53345.2021.9723307",
language = "English (US)",
series = "Conference Record - Asilomar Conference on Signals, Systems and Computers",
publisher = "IEEE Computer Society",
pages = "490--497",
editor = "Matthews, {Michael B.}",
booktitle = "55th Asilomar Conference on Signals, Systems and Computers, ACSSC 2021",
}