@techreport{siahkoohi2020TRfuqf,
  title       = {Faster Uncertainty Quantification for Inverse Problems with Conditional Normalizing Flows},
  number      = {TR-CSE-2020-2},
  year        = {2020},
  month       = jul,
  institution = {Georgia Institute of Technology},
  abstract    = {In inverse problems, we often have access to data consisting of paired samples $(x,y)\sim p_{X,Y}(x,y)$ where $y$ are partial observations of a physical system, and $x$ represents the unknowns of the problem. Under these circumstances, we can employ supervised training to learn a solution $x$ and its uncertainty from the observations $y$. We refer to this problem as the ``supervised'' case. However, the data $y\sim p_Y(y)$ collected at one point could be distributed differently than observations $y'\sim p_{Y'}(y')$, relevant for a current set of problems. In the context of Bayesian inference, we propose a two-step scheme, which makes use of normalizing flows and joint data to train a conditional generator $q_\theta(x|y)$ to approximate the target posterior density $p_{X|Y}(x|y)$. Additionally, this preliminary phase provides a density function $q_\theta(x|y)$, which can be recast as a prior for the ``unsupervised'' problem, e.g. when only the observations $y'\sim p_{Y'}(y')$, a likelihood model $y'|x$, and a prior on $x'$ are known. We then train another invertible generator with output density $q'_\phi(x|y')$ specifically for $y'$, allowing us to sample from the posterior $p_{X|Y'}(x|y')$. We present some synthetic results that demonstrate considerable training speedup when reusing the pretrained network $q_\theta(x|y')$ as a warm start or preconditioning for approximating $p_{X|Y'}(x|y')$, instead of learning from scratch. This training modality can be interpreted as an instance of transfer learning. This result is particularly relevant for large-scale inverse problems that employ expensive numerical simulations.},
  keywords    = {deep learning, invertible networks, uncertainty quantification},
  url         = {https://slim.gatech.edu/Publications/Public/TechReport/2020/siahkoohi2020TRfuqf/siahkoohi2020TRfuqf.html},
  author      = {Siahkoohi, Ali and Rizzuti, Gabrio and Witte, Philipp A. and Herrmann, Felix J.},
}