BibTeX
@INCOLLECTION{Juedes1996GNN,
author = "David W. Juedes and Karthik Balakrishnan",
editor = "Martin Berz and Christian Bischof and George Corliss and Andreas Griewank",
title = "Generalized Neural Networks, Computational Differentiation, and Evolution",
booktitle = "Computational Differentiation: Techniques, Applications, and Tools",
pages = "273--285",
publisher = "SIAM",
address = "Philadelphia, PA",
key = "Juedes1996GNN",
crossref = "Berz1996CDT",
abstract = "Backpropagation is a powerful and widely used procedure for training multilayer,
feedforward artificial neural networks. The procedure can be seen as a special case of the reverse
mode of computational differentiation. This connection between backpropagation and computational
differentiation leads us to envision a scenario wherein neural networks can be trained by using
gradient descent methods and computational differentiation tools like ADOL-C. The primary advantage
offered by such an approach is the possibility of training networks consisting of heterogeneous
functional units---a notion we refer to as {\em generalized neural networks.} This approach, in
conjunction with {\em evolutionary algorithms,} can be used to produce near-optimal designs.
This paper presents this approach in more detail and demonstrates its usefulness through simulation
results.",
keywords = "Neural networks, genetic algorithms, evolution, ADOL-C.",
year = "1996",
ad_tools = "ADOL-C"
}
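
The abstract's central point, that backpropagation is a special case of the reverse mode of computational differentiation, so an AD tool can supply gradients even for networks built from heterogeneous functional units, can be illustrated with a small sketch. The following C++ fragment is not taken from the paper; the two-unit topology (a tanh hidden unit feeding a Gaussian output unit), the initial weights, the single training pair, and the learning rate are illustrative assumptions. It only shows how ADOL-C's taping and reverse-mode gradient driver could feed a plain gradient-descent step.

// Minimal sketch (assumptions noted above): tape a tiny "generalized" network
// with ADOL-C and obtain its gradient via the reverse mode.
#include <adolc/adolc.h>
#include <cstdio>

int main() {
    const short tag = 1;
    const int n = 3;                       // number of weights
    double w[n] = {0.1, -0.2, 0.05};       // arbitrary initial weights
    double x0 = 0.7, target = 0.3;         // one illustrative training pair

    // Record the computation: a tanh hidden unit feeding a Gaussian output
    // unit, i.e. heterogeneous functional units in the sense of the paper.
    trace_on(tag);
    adouble aw[n];
    for (int i = 0; i < n; ++i) aw[i] <<= w[i];
    adouble hidden = tanh(aw[0] * x0 + aw[1]);
    adouble output = exp(-(aw[2] * hidden) * (aw[2] * hidden));
    adouble loss = (output - target) * (output - target);
    double loss_val;
    loss >>= loss_val;
    trace_off();

    // Reverse-mode gradient of the loss w.r.t. the weights
    // (backpropagation as reverse-mode computational differentiation).
    double g[n];
    gradient(tag, n, w, g);

    // One plain gradient-descent update.
    const double lr = 0.1;
    for (int i = 0; i < n; ++i) w[i] -= lr * g[i];
    printf("loss = %g, grad = (%g, %g, %g)\n", loss_val, g[0], g[1], g[2]);
    return 0;
}

Because the tape records whatever differentiable units appear in the forward pass, swapping in other functional units (or letting an evolutionary algorithm choose them, as the paper proposes) requires no hand-derived backpropagation rules.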