@inproceedings{f917bcc4ee5e4c37b1f0fa40e2c0bb31,
title = "Minimum precision requirements for the SVM-SGD learning algorithm",
abstract = "It is well known that the precision of the data, weight vector, and internal representations employed in learning systems directly impacts their energy, throughput, and latency. The precision requirements of the training algorithm are also important for systems that learn on the fly. In this paper, we present analytical lower bounds on the precision requirements of the commonly employed stochastic gradient descent (SGD) on-line learning algorithm in the specific context of a support vector machine (SVM). These bounds are obtained subject to a desired system performance and are validated on the UCI breast cancer dataset. Additionally, the impact of these precisions on the energy consumption of a fixed-point SVM with on-line training is studied. Simulation results in a 45 nm CMOS process show that operating at the minimum precision dictated by our bounds reduces energy consumption by a factor of 5.3× compared to conventional precision assignments, with no observable loss in accuracy.",
keywords = "accuracy, energy, fixed point, machine learning, precision",
author = "Charbel Sakr and Ameya Patil and Sai Zhang and Yongjune Kim and Naresh Shanbhag",
note = "Publisher Copyright: {\textcopyright} 2017 IEEE. 2017 IEEE International Conference on Acoustics, Speech, and Signal Processing, ICASSP 2017; Conference date: 05-03-2017 through 09-03-2017",
year = "2017",
month = jun,
day = "16",
doi = "10.1109/ICASSP.2017.7952334",
language = "English (US)",
series = "ICASSP, IEEE International Conference on Acoustics, Speech and Signal Processing - Proceedings",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "1138--1142",
booktitle = "2017 IEEE International Conference on Acoustics, Speech, and Signal Processing, ICASSP 2017 - Proceedings",
address = "United States",
}
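
The entry above concerns analytical precision bounds for SGD training of an SVM in fixed point. As a rough illustration only, and not code or notation from the paper, the following minimal Python sketch trains a linear SVM with a hinge-loss SGD update while quantizing the data and weight vector to chosen bit widths; the quantize helper and the data_bits and weight_bits parameters are hypothetical placeholders for the precisions on which the paper derives bounds.

    # Illustrative sketch (not from the paper): fixed-point SVM-SGD training loop.
    import numpy as np

    def quantize(x, n_bits, x_max=1.0):
        # Uniform symmetric quantizer: snap to 2^n_bits levels in [-x_max, x_max].
        step = 2.0 * x_max / (2 ** n_bits)
        return np.clip(np.round(x / step) * step, -x_max, x_max)

    def svm_sgd_fixed_point(X, y, n_epochs=10, lr=0.01, lam=0.01,
                            data_bits=8, weight_bits=8):
        # X: (n_samples, n_features) scaled to [-1, 1]; y: labels in {-1, +1}.
        n, d = X.shape
        w = np.zeros(d)
        Xq = quantize(X, data_bits)          # quantized input representation
        for _ in range(n_epochs):
            for i in np.random.permutation(n):
                margin = y[i] * np.dot(w, Xq[i])
                # Hinge-loss subgradient step with L2 regularization.
                if margin < 1:
                    w += lr * (y[i] * Xq[i] - lam * w)
                else:
                    w -= lr * lam * w
                w = quantize(w, weight_bits)  # keep the weight vector at fixed precision
        return w

Sweeping data_bits and weight_bits against held-out accuracy on a dataset such as the UCI breast cancer set would mimic, informally, the precision-versus-accuracy trade-off that the abstract studies analytically.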