@inproceedings{fbe9b5f62d1c4193ab6184b18603858f,
title = "Universal Source Coding of Deep Neural Networks",
abstract = "Deep neural networks have shown incredible performance for inference tasks in a variety of domains. Unfortunately, most current deep networks are enormous cloud-based structures that require significant storage space, which limits scaling of deep learning as a service (DLaaS). This paper is concerned with finding universal lossless compressed representations of deep feedforward networks with synaptic weights drawn from discrete sets. The basic insight that allows much less rate than naive approaches is the recognition that the bipartite graph layers of feedforward networks have a kind of permutation invariance to the labeling of nodes, in terms of inferential operation. We provide efficient algorithms to dissipate this irrelevant uncertainty and then use arithmetic coding to nearly achieve the entropy bound in a universal manner.",
author = "Basu, Sourya and Varshney, {Lav R.}",
note = "Publisher Copyright: {\textcopyright} 2017 IEEE; 2017 Data Compression Conference, DCC 2017; Conference date: 04-04-2017 through 07-04-2017",
year = "2017",
month = may,
day = "8",
doi = "10.1109/DCC.2017.60",
language = "English (US)",
series = "Data Compression Conference Proceedings",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "310--319",
editor = "Bilgin, Ali and Serra-Sagrist{\`a}, Joan and Marcellin, {Michael W.} and Storer, {James A.}",
booktitle = "Proceedings of the 2017 Data Compression Conference (DCC 2017)",
address = "United States",
}