@inproceedings{d4e5e73a602a4173adf1e8c8bc6b803e,
title = "Learning Distributions Generated by Single-Layer ReLU Networks in the Presence of Arbitrary Outliers",
abstract = "We consider a set of data samples such that a fraction of the samples are arbitrary outliers, and the rest are the output samples of a single-layer neural network with rectified linear unit (ReLU) activation. Our goal is to estimate the parameters (weight matrix and bias vector) of the neural network, assuming the bias vector to be non-negative. We estimate the network parameters using the gradient descent algorithm combined with either the median- or trimmed mean-based filters to mitigate the effect of the arbitrary outliers. We then prove that (Equation presented) samples and (Equation presented) time are sufficient for our algorithm to estimate the neural network parameters within an error of ϵ when the outlier probability is 1 − p, where 2/3 < p ≤ 1 and the problem dimension is d (with log factors being ignored here). Our theoretical and simulation results provide insights into the training complexity of ReLU neural networks in terms of the probability of outliers and problem dimension.",
author = "Saikiran Bulusu and Gursoy, {M. Cenk} and Geethu Joseph and Varshney, {Pramod K.}",
note = "Publisher Copyright: {\textcopyright} 2022 Neural information processing systems foundation. All rights reserved.; 36th Conference on Neural Information Processing Systems, NeurIPS 2022 ; Conference date: 28-11-2022 Through 09-12-2022",
year = "2022",
language = "English (US)",
series = "Advances in Neural Information Processing Systems",
publisher = "Neural information processing systems foundation",
editor = "S. Koyejo and S. Mohamed and A. Agarwal and D. Belgrave and K. Cho and A. Oh",
booktitle = "Advances in Neural Information Processing Systems 35 - 36th Conference on Neural Information Processing Systems, NeurIPS 2022",
}