BibTeX
@ARTICLE{
Gonnheimer2025BNH,
author = "G{\"o}nnheimer, Nils and Reuter, Karsten and Margraf, Johannes T.",
title = "Beyond Numerical {H}essians: Higher-Order Derivatives for Machine Learning Interatomic
Potentials via Automatic Differentiation",
journal = "Journal of Chemical Theory and Computation",
volume = "21",
number = "9",
pages = "4742--4752",
year = "2025",
doi = "10.1021/acs.jctc.4c01790",
abstract = "{ The development of machine learning interatomic potentials (MLIPs) has
revolutionized computational chemistry by enhancing the accuracy of empirical force fields while
retaining a large computational speed-up compared to first-principles calculations. Despite these
advancements, the calculation of Hessian matrices for large systems remains challenging, in
particular because analytical second-order derivatives are often not implemented. This necessitates
the use of computationally expensive finite-difference methods, which can furthermore display low
precision in some cases. Automatic differentiation (AD) offers a promising alternative to reduce
this computational effort and makes the calculation of Hessian matrices more efficient and accurate.
Here, we present the implementation of AD-based second-order derivatives for the popular MACE
equivariant graph neural network architecture. The benefits of this method are showcased via a
high-throughput prediction of heat capacities of porous materials with the MACE-MP-0 foundation
model. This is essential for precisely describing gas adsorption in these systems and was previously
possible only with bespoke ML models or expensive first-principles calculations. We find that the
availability of foundation models and accurate analytical Hessian matrices offers comparable
accuracy to bespoke ML models in a zero-shot manner and additionally allows for the investigation of
finite-size and rounding errors in the first-principles data. }",
ad_area = "Chemistry",
ad_theotech = "Hessian"
}
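
As a point of reference, the core idea summarized in the abstract, obtaining analytical second derivatives of an energy function via automatic differentiation rather than finite differences, can be sketched in a few lines of PyTorch. The toy pair potential, atom coordinates, and the use of torch.autograd.functional.hessian below are illustrative assumptions for a generic energy function; this is not the MACE-specific implementation described in the article.

import torch
from torch.autograd.functional import hessian

def energy(positions: torch.Tensor) -> torch.Tensor:
    # Illustrative Lennard-Jones-like pair energy for a flat (3N,) coordinate
    # vector; this toy function stands in for an MLIP energy model (assumption,
    # not the MACE architecture referenced in the entry).
    pos = positions.view(-1, 3)
    n = pos.shape[0]
    iu = torch.triu_indices(n, n, offset=1)
    rij = (pos[iu[0]] - pos[iu[1]]).norm(dim=-1)  # unique pair distances
    return (rij.pow(-12) - rij.pow(-6)).sum()

# Three atoms, flattened so the Hessian comes out as a (3N, 3N) matrix.
x = torch.tensor([[0.0, 0.0, 0.0],
                  [1.1, 0.0, 0.0],
                  [0.0, 1.1, 0.0]], dtype=torch.float64).reshape(-1)

H = hessian(energy, x)  # analytical second derivatives d^2 E / dx_i dx_j via AD
print(H.shape)          # torch.Size([9, 9])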