@conference {herrmann2023SIAMCSEtns,
  title = {The Next Step: Interoperable Domain-Specific Programming},
  booktitle = {SIAM Conference on Computational Science and Engineering},
  year = {2023},
  note = {(SIAM CSE, Amsterdam)},
  month = {02},
  abstract = {Even though domain-specific programming approaches allow for readable, scalable, and maintainable software without sacrificing performance, the new paradigm of learned physics-informed models calls for an interdisciplinary approach that typically involves multiple domain-specific languages. Take, for example, the problem of inverting for fluid-flow properties from time-lapse seismic data, which entails domain-specific programming at the intersection of wave simulators, matrix-free linear algebra, learned neural surrogates for two-phase flow, and prior and posterior distributions for the fluid-flow properties. While domain-specific solutions exist for each of these sub-disciplines, integrating these approaches {\textendash} which may involve different programming languages {\textendash} into a single coupled, scalable inversion framework that supports algorithmic differentiation can be a challenge. However, we show that challenges like this can be met when working with proper abstractions. In our inversion example, this involves math-inspired symbolic abstractions for numerical solutions of the wave equation (Devito), matrix-free implementations of its Jacobians (JUDI.jl), abstractions for automatic differentiation (ChainRules.jl), and homegrown implementations of conditional Invertible Neural Networks (InvertibleNetworks.jl) and Fourier Neural Operators (ParametricOperators.jl).},
  keywords = {algorithms, deep learning, devito, end-to-end, Fourier neural operators, GCS, JUDI, Jutul, SIAM, software, workshop},
  url = {https://slim.gatech.edu/Publications/Public/Conferences/SIAMCSE/2023/herrmann2023SIAMCSEtns/index.html},
  author = {Felix J. Herrmann and Mathias Louboutin and Thomas J. Grady II and Ziyi Yin and Rishi Khan}
}