BibTeX
@ARTICLE{Bell2018Nsm,
crossref = "Christianson2018Sio",
author = "Bradley M. Bell and Kasper Kristensen",
title = "{N}ewton step methods for {AD} of an objective defined using implicit functions",
journal = "Optimization Methods \& Software",
volume = "33",
number = "4--6",
pages = "907--923",
year = "2018",
publisher = "Taylor \& Francis",
doi = "10.1080/10556788.2017.1406936",
url = "https://doi.org/10.1080/10556788.2017.1406936",
abstract = "We consider the problem of computing derivatives of an objective that is defined
using implicit functions; i.e., implicit variables are computed by solving equations that are often
nonlinear and solved by an iterative process. If one were to apply Algorithmic Differentiation (AD)
directly, one would differentiate the iterative process. In this paper we present the Newton step
methods for computing derivatives of the objective. These methods make it easy to take advantage of
sparsity, forward mode, reverse mode, and other AD techniques. We prove that the partial Newton step
method works if the number of steps is equal to the order of the derivatives. The full Newton step
method obtains two derivative orders for each step, except for the first step. There are alternative
methods that avoid differentiating the iterative process; e.g., the method implemented in ADOL-C. An
optimal control example demonstrates the advantage of the Newton step methods when computing both
gradients and Hessians. We also discuss the Laplace approximation method for nonlinear mixed effects
models as an example application.",
booktitle = "Special issue of Optimization Methods \& Software: Advances in
Algorithmic Differentiation",
editor = "Bruce Christianson and Shaun A. Forth and Andreas Griewank"
}
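
A minimal sketch of the full Newton step idea described in the abstract: solve the implicit equation with an iterative process that AD does not see, then append one differentiable Newton step at the converged point; differentiating that single step recovers the implicit derivative. This is not the authors' code; the equation g, the objective, and the names solve_implicit and objective are invented for illustration, and JAX is used only because the source specifies no language.

import jax

def g(x, y):
    # Implicit equation defining y(x) via g(x, y(x)) = 0.
    return y**3 + y - x

def solve_implicit(x, y0=0.0, iters=50):
    # Converge with plain Newton iterations, then cut the gradient so
    # that AD does not differentiate through the iterative process.
    y = y0
    for _ in range(iters):
        y = y - g(x, y) / jax.grad(g, argnums=1)(x, y)
    return jax.lax.stop_gradient(y)

def objective(x):
    # Append one differentiable Newton step at the converged point. AD
    # through this step yields dy/dx = -g_x/g_y, the implicit function
    # theorem value, because g(x, y) is (numerically) zero there.
    y = solve_implicit(x)
    y = y - g(x, y) / jax.grad(g, argnums=1)(x, y)
    return y**2 + x

# At x = 2 the implicit solution is y = 1, so dy/dx = 1/(3*y**2 + 1) = 0.25
# and d(objective)/dx = 2*y*dy/dx + 1 = 1.5.
print(jax.grad(objective)(2.0))  # ~1.5

For a gradient, one appended step suffices; consistent with the abstract's result, higher derivative orders require correspondingly more partial Newton steps, or roughly half as many full steps.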