@article{Wang2019Ddp,
  author      = {Wang, Fei and Zheng, Daniel and Decker, James and Wu, Xilun and
                 Essertel, Gr{\'e}gory M. and Rompf, Tiark},
  title       = {Demystifying Differentiable Programming: {shift/reset} the
                 Penultimate Backpropagator},
  journal     = {Proceedings of the ACM on Programming Languages},
  volume      = {3},
  number      = {ICFP},
  articleno   = {96},
  numpages    = {31},
  pages       = {96:1--96:31},
  year        = {2019},
  month       = jul,
  issue_date  = {August 2019},
  publisher   = {Association for Computing Machinery},
  address     = {New York, NY, USA},
  doi         = {10.1145/3341700},
  url         = {https://doi.org/10.1145/3341700},
  abstract    = {Deep learning has seen tremendous success over the past decade in computer vision,
                 machine translation, and gameplay. This success rests crucially on gradient-descent
                 optimization and the ability to ``learn'' parameters of a neural network by
                 backpropagating observed errors. However, neural network architectures are growing
                 increasingly sophisticated and diverse, which motivates an emerging quest for even
                 more general forms of differentiable programming, where arbitrary parameterized
                 computations can be trained by gradient descent. In this paper, we take a fresh look
                 at automatic differentiation (AD) techniques, and especially aim to demystify the
                 reverse-mode form of AD that generalizes backpropagation in neural networks. We
                 uncover a tight connection between reverse-mode AD and delimited continuations,
                 which permits implementing reverse-mode AD purely via operator overloading and
                 without managing any auxiliary data structures. We further show how this formulation
                 of AD can be fruitfully combined with multi-stage programming (staging), leading to
                 an efficient implementation that combines the performance benefits of deep learning
                 frameworks based on explicit reified computation graphs (e.g., TensorFlow) with the
                 expressiveness of pure library approaches (e.g., PyTorch).},
  keywords    = {Automated Differentiation, Delimited Continuations, Differentiable Programming,
                 Multi-stage Programming},
  ad_theotech = {Functional Programming},
}