@inproceedings{55d4e2714ed9476483cad066800de385,
title = "Large-scale sparse inverse covariance estimation via thresholding and max-det matrix completion",
abstract = "The sparse inverse covariance estimation problem is commonly solved using an ℓ1-regularized Gaussian maximum likelihood estimator known as the ``graphical lasso'', but its computational cost becomes prohibitive for large data sets. A recent line of results showed, under mild assumptions, that the graphical lasso estimator can be retrieved by soft-thresholding the sample covariance matrix and solving a maximum determinant matrix completion (MDMC) problem. This paper proves an extension of this result, and describes a Newton-CG algorithm to efficiently solve the MDMC problem. Assuming that the thresholded sample covariance matrix is sparse with a sparse Cholesky factorization, we prove that the algorithm converges to an ϵ-accurate solution in O(n log(1/ϵ)) time and O(n) memory. The algorithm is highly efficient in practice: we solve the associated MDMC problems with as many as 200,000 variables to 7-9 digits of accuracy in less than an hour on a standard laptop computer running MATLAB.",
author = "Zhang, {Richard Y.} and Fattahi, Salar and Sojoudi, Somayeh",
year = "2018",
language = "English (US)",
series = "35th International Conference on Machine Learning, ICML 2018",
publisher = "International Machine Learning Society (IMLS)",
pages = "9178--9200",
editor = "Krause, Andreas and Dy, Jennifer",
booktitle = "35th International Conference on Machine Learning, ICML 2018",
note = "35th International Conference on Machine Learning, ICML 2018; Conference date: 10-07-2018 through 15-07-2018",
}