BibTeX
@ARTICLE{Frank2022Ada,
author = "Frank, Steven A.",
title = "Automatic differentiation and the optimization of differential equation models in
biology",
journal = "Frontiers in Ecology and Evolution",
volume = "10",
year = "2022",
url = "https://www.frontiersin.org/journals/ecology-and-evolution/articles/10.3389/fevo.2022.1010278",
doi = "10.3389/fevo.2022.1010278",
issn = "2296-701X",
abstract = "A computational revolution unleashed the power of artificial neural networks. At
the heart of that revolution is automatic differentiation, which calculates the derivative of a
performance measure relative to a large number of parameters. Differentiation enhances the discovery
of improved performance in large models, an achievement that was previously difficult or impossible.
Recently, a second computational advance optimizes the temporal trajectories traced by differential
equations. Optimization requires differentiating a measure of performance over a trajectory, such as
the closeness of tracking the environment, with respect to the parameters of the differential
equations. Because model trajectories are usually calculated numerically by multistep algorithms,
such as Runge-Kutta, the automatic differentiation must be passed through the numerical algorithm.
This article explains how such automatic differentiation of trajectories is achieved. It also
discusses why such computational breakthroughs are likely to advance theoretical and statistical
studies of biological problems, in which one can consider variables as dynamic paths over time and
space. Many common problems arise between improving success in computational learning models over
performance landscapes, improving evolutionary fitness over adaptive landscapes, and improving
statistical fits to data over information landscapes.",
ad_area = "Ordinary Differential Equations"
}
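
The abstract's central point, that the derivative of a trajectory-level performance measure must be traced through the numerical integrator itself, can be illustrated with a small sketch. The following is a minimal, hypothetical example written with JAX, not code from the paper: a hand-written fourth-order Runge-Kutta loop integrates an assumed one-parameter decay model, and jax.grad differentiates a squared-error trajectory loss with respect to that parameter. The dynamics, the parameter theta, the target path, and all function names are illustrative assumptions; a realistic application would more likely use jax.lax.scan or a differentiable ODE library such as Diffrax.

```python
# A minimal sketch, not code from the paper: differentiating a trajectory loss
# "through" a hand-written fourth-order Runge-Kutta integrator with JAX.
# The one-parameter dynamics, the parameter theta, and the target trajectory
# below are illustrative assumptions, not taken from the article.
import jax
import jax.numpy as jnp

def f(y, theta):
    # Hypothetical dynamics: dy/dt = -theta * y (exponential decay).
    return -theta * y

def rk4_step(y, theta, dt):
    # Classical Runge-Kutta step; automatic differentiation simply traces
    # through these arithmetic operations, so no hand-derived gradient is needed.
    k1 = f(y, theta)
    k2 = f(y + 0.5 * dt * k1, theta)
    k3 = f(y + 0.5 * dt * k2, theta)
    k4 = f(y + dt * k3, theta)
    return y + (dt / 6.0) * (k1 + 2.0 * k2 + 2.0 * k3 + k4)

def loss(theta, y0, target, dt):
    # Performance measure over the whole trajectory: squared distance between
    # the simulated path and a target path (e.g., data or an environment signal).
    y, ys = y0, []
    for _ in range(target.shape[0]):
        y = rk4_step(y, theta, dt)
        ys.append(y)
    return jnp.sum((jnp.stack(ys) - target) ** 2)

# Gradient of the trajectory loss with respect to the model parameter theta.
dt, n_steps, y0 = 0.1, 50, 1.0
target = jnp.exp(-0.5 * dt * jnp.arange(1, n_steps + 1))  # path of a "true" theta = 0.5
print(jax.grad(loss)(1.0, y0, target, dt))
```

This sketch follows the discretize-then-differentiate strategy the abstract describes: the multistep solver is just a composition of elementary operations, so reverse-mode automatic differentiation can propagate the loss gradient back through every Runge-Kutta step to the model parameters.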