@inproceedings{0138a77db34e4731b8e252bbd16cae6c,
  title     = {Evolutionary Training of Deep Neural Networks on Heterogeneous Computing Environments},
  abstract  = {Deep neural networks are typically trained using gradient-based optimizers such as error backpropagation. This study proposes a framework based on Evolutionary Algorithms (EAs) to train deep neural networks without gradients. The network parameters, which may vary up to millions, are considered optimization variables. We demonstrate the training of an encoder-decoder segmentation network (U-Net) and Long Short-Term Memory (LSTM) model using {$(\mu + \lambda)$}-ES, Genetic Algorithm, and Particle Swarm Optimization. The framework can train models with forward propagation on machines with different hardware in a cluster computing environment. We compare prediction results from the two models trained using our framework and backpropagation. We show that the neural networks can be trained in less time on CPUs as compared to the training on specialized compute-intensive GPUs.},
  keywords  = {evolutionary algorithms, heuristics, neural networks, parallelization},
  author    = {Kalia, Subodh and Mohan, {Chilukuri K.} and Nemani, Ramakrishna},
  note      = {Publisher Copyright: {\textcopyright} 2022 ACM.; 2022 Genetic and Evolutionary Computation Conference, GECCO 2022 ; Conference date: 09-07-2022 Through 13-07-2022},
  year      = {2022},
  month     = jul,
  day       = {9},
  doi       = {10.1145/3520304.3533954},
  language  = {English (US)},
  publisher = {Association for Computing Machinery, Inc},
  pages     = {2318--2321},
  booktitle = {GECCO 2022 Companion - Proceedings of the 2022 Genetic and Evolutionary Computation Conference},
}