@inproceedings{80aec9993ebd42d09f06e34edea4bf5f,
  title     = {Numerical Exploration of Training Loss Level-Sets in Deep Neural Networks},
  abstract  = {We present a computational method for empirically characterizing the training loss level-sets of deep neural networks. Our method numerically constructs a path in parameter space that is constrained to a set with a fixed near-zero training loss. By measuring regularization functions and test loss at different points within this path, we examine how different points in the parameter space with the same fixed training loss compare in terms of generalization ability. We also compare this method for finding regularized points with the more typical method, that uses objective functions which are weighted sums of training loss and regularization terms. We apply dimensionality reduction to the traversed paths in order to visualize the loss level sets in a well-regularized region of parameter space. Our results provide new information about the loss landscape of deep neural networks, as well as a new strategy for reducing test loss.},
  keywords  = {deep learning, generalization, optimization},
  author    = {Tahir, Naveed and Katz, Garrett E.},
  note      = {Publisher Copyright: {\textcopyright} 2021 IEEE.; 2021 International Joint Conference on Neural Networks, IJCNN 2021 ; Conference date: 18-07-2021 Through 22-07-2021},
  year      = {2021},
  month     = jul,
  day       = {18},
  doi       = {10.1109/IJCNN52387.2021.9534199},
  language  = {English (US)},
  series    = {Proceedings of the International Joint Conference on Neural Networks},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  booktitle = {IJCNN 2021 - International Joint Conference on Neural Networks, Proceedings},
}