@conference {erdinc2022AAAIdcc, title = {De-risking Carbon Capture and Sequestration with Explainable CO2 Leakage Detection in Time-lapse Seismic Monitoring Images}, booktitle = {AAAI 2022 Fall Symposium: The Role of AI in Responding to Climate Challenges}, year = {2022}, note = {Accepted in AAAI 2022 Fall Symposium: The Role of AI in Responding to Climate Challenges}, month = {08}, abstract = {With the growing global deployment of carbon capture and sequestration technology to combat climate change, monitoring and detection of potential CO2 leakage through existing or storage induced faults are critical to the safe and long-term viability of the technology. Recent work on time-lapse seismic monitoring of CO2 storage has shown promising results in its ability to monitor the growth of the CO2 plume from surface recorded seismic data. However, due to the low sensitivity of seismic imaging to CO2 concentration, additional developments are required to efficiently interpret the seismic images for leakage. In this work, we introduce a binary classification of time-lapse seismic images to delineate CO2 plumes (leakage) using state-of-the-art deep learning models. Additionally, we localize the leakage region of CO2 plumes by leveraging Class Activation Mapping (CAM) methods.}, keywords = {AAAI, CAM, CCS, classification, explainability, JRM, seismic imaging}, url = {https://slim.gatech.edu/Publications/Public/Conferences/AAAI/2022/erdinc2022AAAIdcc/erdinc2022AAAIdcc.pdf}, author = {Huseyin Tuna Erdinc and Abhinav P. Gahlot and Ziyi Yin and Mathias Louboutin and Felix J. Herrmann} } @incollection{bharadwaj2020symae, title={SymAE: an autoencoder with embedded physical symmetries for passive time-lapse monitoring}, author={Bharadwaj, Pawan and Li, Matt and Demanet, Laurent}, booktitle={SEG Technical Program Expanded Abstracts 2020}, pages={1586--1590}, year={2020}, publisher={Society of Exploration Geophysicists} } @incollection{ARTS2003347, title = {Monitoring of {CO2} Injected at Sleipner Using Time Lapse Seismic Data}, editor = {J. Gale and Y. Kaya}, booktitle = {Greenhouse Gas Control Technologies - 6th International Conference}, publisher = {Pergamon}, address = {Oxford}, pages = {347-352}, year = {2003}, isbn = {978-0-08-044276-1}, doi = {https://doi.org/10.1016/B978-008044276-1/50056-8}, url = {https://www.sciencedirect.com/science/article/pii/B9780080442761500568}, author = {R. Arts and O. Eiken and A. Chadwick and P. Zweigel and L. {van der Meer} and B. Zinszner}, abstract = {Since October 1996, Statoil and its Sleipner partners have injected CO2 into a saline aquifer, the Utsira Sand, at a depth of approximately 1000 m. The aquifer has a thickness of more than 200 m near the injection site and is sealed by thick shales. A multi-institutional research project, SACS (Saline Aquifer CO2 Storage), was formed to predict and monitor the migration of the injected CO2. To this end, two time-lapse seismic surveys over the injection area have been acquired, one in October 1999, after 2.3 million tonnes of CO2 had been injected, and the second in October 2001, after approximately 4.4 million tonnes of CO2 had been injected. Comparison with the base seismic survey of 1994 prior to injection provides insights into the development of the CO2 plume. This chapter presents some selected results of the seismic interpretation of the CO2 plume at the two different time-steps. The overall effect of the accumulated CO2 on the seismic signal is significant.
At several depth levels within the Utsira Sand, a large increase in reflectivity has been observed on the time-lapse seismic data caused by individual CO2 accumulations under the intra-reservoir shale layers.} } @article{FURRE20173916, title = {20 Years of Monitoring {CO2}-injection at Sleipner}, journal = {Energy Procedia}, volume = {114}, pages = {3916-3926}, year = {2017}, note = {13th International Conference on Greenhouse Gas Control Technologies, GHGT-13, 14-18 November 2016, Lausanne, Switzerland}, issn = {1876-6102}, doi = {https://doi.org/10.1016/j.egypro.2017.03.1523}, url = {https://www.sciencedirect.com/science/article/pii/S1876610217317174}, author = {Anne-Kari Furre and Ola Eiken and Håvard Alnes and Jonas Nesland Vevatne and Anders Fredrik Kiær}, keywords = {Sleipner, CO2-injection, CCS, monitoring, time-lapse seismic, time-lapse gravimetry}, abstract = {The Sleipner CO2 injection project was the world's first industrial offshore CO2 Capture and Storage (CCS) project with more than 16 Mt CO2 injected since 1996. Key monitoring insights from Sleipner are the dual interpretation of seismic and gravimetric monitoring surveys to quantify the free CO2 mass changes and plume geometry development as a function of time. The learnings from Sleipner have contributed to making guidelines for monitoring future CCS injection projects, showing that selection of monitoring technology and the timing and extent of monitoring surveys should be case specific and risk based, while also taking into account the long term nature of CCS projects.} } @article{li2020coupled, title={Coupled Time-Lapse Full-Waveform Inversion for Subsurface Flow Problems Using Intrusive Automatic Differentiation}, author={Li, Dongzhuo and Xu, Kailai and Harris, Jerry M and Darve, Eric}, journal={Water Resources Research}, volume={56}, number={8}, pages={e2019WR027032}, year={2020}, publisher={Wiley Online Library} } @misc{li2020fourier, title={Fourier Neural Operator for Parametric Partial Differential Equations}, author={Zongyi Li and Nikola Kovachki and Kamyar Azizzadenesheli and Burigede Liu and Kaushik Bhattacharya and Andrew Stuart and Anima Anandkumar}, year={2020}, eprint={2010.08895}, archivePrefix={arXiv}, primaryClass={cs.LG} } @article{rass2018spontaneous, title={Spontaneous formation of fluid escape pipes from subsurface reservoirs}, author={R{\"a}ss, Ludovic and Simon, Nina SC and Podladchikov, Yury Y}, journal={Scientific reports}, volume={8}, number={1}, pages={1--11}, year={2018}, publisher={Nature Publishing Group} } @article {oghenekohwo2016GEOPctl, title = {Low-cost time-lapse seismic with distributed compressive sensing{\textendash}-Part 1: exploiting common information among the vintages}, journal = {Geophysics}, volume = {82}, number = {3}, year = {2017}, note = {(Geophysics)}, month = {05}, pages = {P1-P13}, abstract = {Time-lapse seismic is a powerful technology for monitoring a variety of subsurface changes due to reservoir fluid flow. However, the practice can be technically challenging when one seeks to acquire colocated time-lapse surveys with high degrees of replicability among the shot locations. We have determined that under "ideal" circumstances, in which we ignore errors related to taking measurements off the grid, high-quality prestack data can be obtained from randomized subsampled measurements that are observed from surveys in which we choose not to revisit the same randomly subsampled on-the-grid shot locations. Our acquisition is low cost because our measurements are subsampled.
We have found that the recovered finely sampled prestack baseline and monitor data actually improve significantly when the same on-the-grid shot locations are not revisited. We achieve this result by using the fact that different time-lapse data share information and that nonreplicated (on-the-grid) acquisitions can add information when prestack data are recovered jointly. Whenever the time-lapse data exhibit joint structure{\textendash}-i.e., they are compressible in some transform domain and share information{\textendash}-sparsity-promoting recovery of the "common component" and "innovations," with respect to this common component, outperforms independent recovery of the prestack baseline and monitor data. The recovered time-lapse data are of high enough quality to serve as the input to extract poststack attributes used to compute time-lapse differences. Without joint recovery, artifacts{\textendash}-due to the randomized subsampling{\textendash}-lead to deterioration of the degree of repeatability of the time-lapse data. We tested this method by carrying out experiments with reliable statistics from thousands of repeated experiments. We also confirmed that high degrees of repeatability are achievable for an ocean-bottom cable survey acquired with time-jittered continuous recording.}, keywords = {Acquisition, joint recovery method, marine, random sampling, time-lapse seismic}, doi = {10.1190/geo2016-0076.1}, url = {https://slim.gatech.edu/Publications/Public/Journals/Geophysics/2017/oghenekohwo2016GEOPctl/oghenekohwo2016GEOPctl.html}, author = {Felix Oghenekohwo and Haneet Wason and Ernie Esser and Felix J. Herrmann} } @article {wason2016GEOPctl, title = {Low-cost time-lapse seismic with distributed compressive sensing{\textendash}-Part 2: impact on repeatability}, journal = {Geophysics}, volume = {82}, number = {3}, year = {2017}, note = {(Geophysics)}, month = {05}, pages = {P15-P30}, abstract = {Irregular or off-the-grid spatial sampling of sources and receivers is inevitable in field seismic acquisitions. Consequently, time-lapse surveys become particularly expensive since current practices aim to replicate densely sampled surveys for monitoring changes occurring in the reservoir due to hydrocarbon production. We demonstrate that under certain circumstances, high-quality prestack data can be obtained from cheap randomized subsampled measurements that are observed from nonreplicated surveys. We extend our time-jittered marine acquisition to time-lapse surveys by designing acquisition on irregular spatial grids that render simultaneous, subsampled and irregular measurements. Using the fact that different time-lapse data share information and that nonreplicated surveys add information when prestack data are recovered jointly, we recover periodic densely sampled and colocated prestack data by adapting the recovery method to incorporate a regularization operator that maps traces from an irregular spatial grid to a regular periodic grid. The recovery method is, therefore, a combined operation of regularization, interpolation (estimating missing fine-grid traces from subsampled coarse-grid data), and source separation (unraveling overlapping shot records). 
By relaxing the insistence on replicability between surveys, we find that recovery of the time-lapse difference shows little variability for realistic field scenarios of slightly nonreplicated surveys that suffer from unavoidable natural deviations in spatial sampling of shots (or receivers) and pragmatic compressed-sensing based nonreplicated surveys when compared to the "ideal" scenario of exact replicability between surveys. Moreover, the recovered densely sampled prestack baseline and monitor data improve significantly when the acquisitions are not replicated, and hence can serve as input to extract poststack attributes used to compute time-lapse differences. Our observations are based on experiments conducted for an ocean-bottom cable survey acquired with time-jittered continuous recording assuming source equalization (or same source signature) for the time-lapse surveys and no changes in wave heights, water column velocities or temperature and salinity profiles, etc.}, keywords = {joint recovery method, Marine acquisition, off-the-grid recovery, Optimization, random sampling, time-lapse seismic}, doi = {10.1190/geo2016-0252.1}, url = {https://slim.gatech.edu/Publications/Public/Journals/Geophysics/2017/wason2016GEOPctl/wason2016GEOPctl.html}, author = {Haneet Wason and Felix Oghenekohwo and Felix J. Herrmann} } @article {witte2018cls, title = {Compressive least-squares migration with on-the-fly Fourier transforms}, journal = {Geophysics}, volume = {84}, number = {5}, year = {2019}, note = {(Geophysics)}, month = {08}, pages = {R655-R672}, abstract = {Least-squares reverse-time migration is a powerful approach for true amplitude seismic imaging of complex geological structures, but the successful application of this method is currently hindered by its enormous computational cost, as well as high memory requirements for computing the gradient of the objective function. We tackle these problems by introducing an algorithm for low-cost sparsity-promoting least-squares migration using on-the-fly Fourier transforms. We formulate the least-squares migration objective function in the frequency domain and compute gradients for randomized subsets of shot records and frequencies, thus significantly reducing data movement and the number of overall wave equations solves. By using on-the-fly Fourier transforms, we can compute an arbitrary number of monochromatic frequency-domain wavefields with a time-domain modeling code, instead of having to solve individual Helmholtz equations for each frequency, which quickly becomes computationally infeasible when moving to high frequencies. Our numerical examples demonstrate that compressive imaging with on-the-fly Fourier transforms provides a fast and memory-efficient alternative to time-domain imaging with optimal checkpointing, whose memory requirements for a fixed background model and source wavelet is independent of the number of time steps. Instead, memory and additional computational cost grow with the number of frequencies and determine the amount of subsampling artifacts and crosstalk. 
In contrast to optimal checkpointing, this offers the possibility to trade both memory and computational cost for image quality or a larger number of iterations and is advantageous in new computing environments such as the cloud, where compute is often cheaper than memory and data movement.}, keywords = {Fourier, least squares migration, sparsity-promotion}, doi = {10.1190/geo2018-0490.1}, url = {https://slim.gatech.edu/Publications/Public/Journals/Geophysics/2019/witte2018cls/witte2018cls.pdf}, author = {Philipp A. Witte and Mathias Louboutin and Fabio Luporini and Gerard J. Gorman and Felix J. Herrmann} } @incollection{lumley1997assessing, title={Assessing the technical risk of a 4D seismic project}, author={Lumley, David E and Behrens, Ronald A and Wang, Zhijing}, booktitle={SEG Technical Program Expanded Abstracts 1997}, pages={894--897}, year={1997}, publisher={Society of Exploration Geophysicists} } @ARTICLE{yang2020tdsp, author = {Mengmeng Yang and Zhilong Fang and Philipp A. Witte and Felix J. Herrmann}, title = {Time-domain sparsity promoting least-squares reverse time migration with source estimation}, journal = {Geophysical Prospecting}, year = {2020}, month = {08}, volume = {68}, number = {9}, pages = {2697-2711}, abstract = {Least-squares reverse time migration is well-known for its capability to generate artifact-free true-amplitude subsurface images through fitting observed data in the least-squares sense. However, when applied to realistic imaging problems, this approach is faced with issues related to overfitting and excessive computational costs induced by many wave-equation solves. The fact that the source function is unknown complicates this situation even further. Motivated by recent results in stochastic optimization and transform-domain sparsity-promotion, we demonstrate that the computational costs of inversion can be reduced significantly while avoiding imaging artifacts and restoring amplitudes. While powerful, these new approaches do require accurate information on the source-time function, which is often lacking. Without this information, the imaging quality deteriorates rapidly. We address this issue by presenting an approach where the source-time function is estimated on the fly through a technique known as variable projection. Aside from introducing negligible computational overhead, the proposed method is shown to perform well on imaging problems with noisy data and problems that involve complex settings such as salt. In either case, the presented method produces high-resolution, high-amplitude fidelity images including an estimate for the source-time function.
In addition, due to its use of stochastic optimization, we arrive at these images at roughly one to two times the cost of conventional reverse time migration involving all data.}, keywords = {sparsity inversion, source estimation, penalty}, note = {(Geophysical Prospecting)}, doi = {10.1111/1365-2478.13021}, url = {https://slim.gatech.edu/Publications/Public/Journals/GeophysicalProspecting/2020/yang2020tdsp/yang2020tdsp.html} } @article{yin2008bregman, title={Bregman iterative algorithms for l1-minimization with applications to compressed sensing}, author={Yin, Wotao and Osher, Stanley and Goldfarb, Donald and Darbon, Jerome}, journal={SIAM Journal on Imaging sciences}, volume={1}, number={1}, pages={143--168}, year={2008}, publisher={SIAM} } @book{avseth2010quantitative, title={Quantitative seismic interpretation: Applying rock physics tools to reduce interpretation risk}, author={Avseth, Per and Mukerji, Tapan and Mavko, Gary}, year={2010}, publisher={Cambridge university press} } @ARTICLE{witte2018alf, author = {Philipp A. Witte and Mathias Louboutin and Navjot Kukreja and Fabio Luporini and Michael Lange and Gerard J. Gorman and Felix J. Herrmann}, title = {A large-scale framework for symbolic implementations of seismic inversion algorithms in Julia}, journal = {Geophysics}, volume = {84}, number = {3}, pages = {F57-F71}, year = {2019}, month = {03}, abstract = {Writing software packages for seismic inversion is a very challenging task, since problems such as full-waveform inversion or least-squares imaging are both algorithmically and computationally demanding due to the large number of unknown parameters and the fact that we are propagating waves over many wavelengths. Software frameworks therefore need to combine both versatility and performance to provide geophysicists with the means and flexibility to implement complex algorithms that scale to exceedingly large 3D problems. Following these principles, we introduce the Julia Devito Inversion framework, an open-source software package in Julia for large-scale seismic modeling and inversion based on Devito, a domain-specific language compiler for automatic code generation. The framework consists of matrix-free linear operators for implementing seismic inversion algorithms that closely resembles the mathematical notation, a flexible resilient parallelization and an interface to Devito for generating optimized stencil code to solve the underlying wave equations. In comparison to many manually optimized industry codes written in low-level languages, our software is built on the idea of independent layers of abstractions and user interfaces with symbolic operators, making it possible to manage both the complexity of algorithms and performance optimizations, while preserving modularity, which allows for a level of expressiveness needed to formulate a broad range of wave-equation-based inversion problems. Through a series of numerical examples, we demonstrate that this allows users to implement algorithms for waveform inversion and imaging as simple Julia scripts that scale to large-scale 3D problems; thus providing a truly performant research and production framework.}, keywords = {FWI, LSRTM, modeling, inversion, software}, doi = {10.1190/geo2018-0174.1}, note = {(Geophysics)}, url = {https://slim.gatech.edu/Publications/Public/Journals/Geophysics/2019/witte2018alf/witte2018alf.pdf} } @ARTICLE{louboutin2018dae, author = {Mathias Louboutin and Fabio Luporini and Michael Lange and Navjot Kukreja and Philipp A. Witte and Felix J. 
Herrmann and Paulius Velesko and Gerard J. Gorman}, title = {Devito (v3.1.0): an embedded domain-specific language for finite differences and geophysical exploration}, journal = {Geoscientific Model Development}, year = {2019}, abstract = {We introduce Devito, a new domain-specific language for implementing high-performance finite difference partial differential equation solvers. The motivating application is exploration seismology where methods such as Full-Waveform Inversion and Reverse-Time Migration are used to invert terabytes of seismic data to create images of the earth's subsurface. Even using modern supercomputers, it can take weeks to process a single seismic survey and create a useful subsurface image. The computational cost is dominated by the numerical solution of wave equations and their corresponding adjoints. Therefore, a great deal of effort is invested in aggressively optimizing the performance of these wave-equation propagators for different computer architectures. Additionally, the actual set of partial differential equations being solved and their numerical discretization is under constant innovation as increasingly realistic representations of the physics are developed, further ratcheting up the cost of practical solvers. By embedding a domain-specific language within Python and making heavy use of SymPy, a symbolic mathematics library, we make it possible to develop finite difference simulators quickly using a syntax that strongly resembles the mathematics. The Devito compiler reads this code and applies a wide range of analysis to generate highly optimized and parallel code. This approach can reduce the development time of a verified and optimized solver from months to days.}, keywords = {wave-equation, modeling, finite-differences, HPC}, doi = {10.5194/gmd-12-1165-2019}, note = {(Geoscientific Model Development)}, url = {https://slim.gatech.edu/Publications/Public/Journals/GMD/2019/louboutin2018dae/louboutin2018dae.pdf} } @article{luporini2020architecture, title={Architecture and performance of Devito, a system for automated stencil computation}, author={Luporini, Fabio and Louboutin, Mathias and Lange, Michael and Kukreja, Navjot and Witte, Philipp and H{\"u}ckelheim, Jan and Yount, Charles and Kelly, Paul HJ and Herrmann, Felix J and Gorman, Gerard J}, journal={ACM Transactions on Mathematical Software (TOMS)}, volume={46}, number={1}, pages={1--28}, year={2020}, publisher={ACM New York, NY, USA} } @article{costa2006permeability, title={Permeability-porosity relationship: A reexamination of the Kozeny-Carman equation based on a fractal pore-space geometry assumption}, author={Costa, Antonio}, journal={Geophysical research letters}, volume={33}, number={2}, year={2006}, publisher={Wiley Online Library} } @ARTICLE{oghenekohwo2017hrt, author = {Felix Oghenekohwo and Felix J. Herrmann}, title = {Highly repeatable time-lapse seismic with distributed {Compressive} {Sensing}---mitigating effects of calibration errors}, journal = {The Leading Edge}, year = {2017}, month = {08}, volume = {36}, number = {8}, pages = {688-694}, abstract = {Recently, we demonstrated that combining joint recovery with low-cost non-replicated randomized sampling tailored to time-lapse seismic can give us access to high fidelity, highly repeatable, dense prestack vintages, and high-grade time-lapse. To arrive at this result, we assumed well-calibrated surveys---i.e., we presumed accurate post-plot source/receiver positions. 
Unfortunately, in practice seismic surveys are prone to calibration errors, which are unknown deviations between actual and post-plot acquisition geometry. By means of synthetic experiments, we analyze the possible impact of these errors on vintages and on time-lapse data obtained with our joint recovery model from compressively sampled surveys. Supported by these experiments, we demonstrate that highly repeatable time-lapse vintages are attainable despite the presence of unknown calibration errors in the positions of the shots. We assess the repeatability quantitatively for two scenarios by studying the impact of calibration errors on conventional dense but irregularly sampled surveys and on low-cost compressed surveys. To separate time-lapse effects from calibration issues, we consider the idealized case where the subsurface remains unchanged and the practical situation where time-lapse changes are restricted to a subset of the data. In both cases, the quality of the recovered vintages and time-lapse decreases gracefully for low-cost compressed surveys with increasing calibration errors. Conversely, the quality of vintages from expensive densely periodically sampled surveys decreases more rapidly as unknown and difficult to control calibration errors increase.}, keywords = {time-lapse seismic, marine, random sampling, calibration errors, joint-recovery method}, doi = {10.1190/tle36080688.1}, note = {(The Leading Edge)}, url = {https://slim.gatech.edu/Publications/Public/Journals/TheLeadingEdge/2017/oghenekohwo2017hrt/oghenekohwo2017hrt.html} } @CONFERENCE{oghenekohwo2017EAGEitl, author = {Felix Oghenekohwo and Felix J. Herrmann}, title = {Improved time-lapse data repeatability with randomized sampling and distributed compressive sensing}, booktitle = {EAGE Annual Conference Proceedings}, year = {2017}, month = {06}, abstract = {Recently, new ideas on randomized sampling for time-lapse seismic acquisition have been proposed to address some of the challenges of replicating time-lapse surveys. These ideas, which stem from distributed compressed sensing (DCS) led to the birth of a joint recovery model (JRM) for processing time-lapse data (noise-free) acquired from non-replicated acquisition geometries. However, when the earth does not change---i.e. no time-lapse—the recovered vintages from two non-replicated surveys should show high repeatability measured in terms of normalized RMS, which is a standard metric for quantifying time-lapse data repeatability. Under this assumption of no time-lapse change, we demonstrate improved repeatability (with JRM) of the recovered data from non-replicated random samplings, first with noisy data and secondly in situations where there are calibration errors i.e. where the acquisition parameters such as source/receiver coordinates are not precise.}, keywords = {EAGE, repeatability, time lapse, compressive sensing, calibration, noise}, note = {(EAGE, Paris)}, doi = {10.3997/2214-4609.201701389}, url = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2017/oghenekohwo2017EAGEitl/oghenekohwo2017EAGEitl.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2017/oghenekohwo2017EAGEitl/oghenekohwo2017EAGEitl_poster.pdf} } @CONFERENCE{wason2015EAGEcsm, author = {Haneet Wason and Felix Oghenekohwo and Felix J. 
Herrmann}, title = {Compressed sensing in {4-D marine}---recovery of dense time-lapse data from subsampled data without repetition}, booktitle = {EAGE Annual Conference Proceedings}, year = {2015}, month = {06}, abstract = {We present an extension of our time-jittered marine acquisition for time-lapse surveys by working on more realistic field acquisition scenarios by incorporating irregular spatial grids without insisting on repeatability between the surveys. Since we are always subsampled in both the baseline and monitor surveys, we are interested in recovering the densely sampled baseline and monitor, and then the (complete) 4-D difference from subsampled/incomplete baseline and monitor data.}, keywords = {EAGE, simultaneous acquisition, time-lapse, off-the-grid, NFFT}, note = {(EAGE, Madrid)}, doi = {10.3997/2214-4609.201413088}, url = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2015/wason2015EAGEcsm/wason2015EAGEcsm.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2015/wason2015EAGEcsm/wason2015EAGEcsm_poster.pdf} } @CONFERENCE{oghenekohwo2015EAGEuci, author = {Felix Oghenekohwo and Rajiv Kumar and Ernie Esser and Felix J. Herrmann}, title = {Using common information in compressive time-lapse full-waveform inversion}, booktitle = {EAGE Annual Conference Proceedings}, year = {2015}, month = {06}, abstract = {The use of time-lapse seismic data to monitor changes in the subsurface has become standard practice in industry. In addition, full-waveform inversion has also been extended to time-lapse seismic to obtain useful time-lapse information. The computational cost of this method are becoming more pronounced as the volume of data increases. Therefore, it is necessary to develop fast inversion algorithms that can also give improved time-lapse results. Rather than following existing joint inversion algorithms, we are motivated by a joint recovery model which exploits the common information among the baseline and monitor data. We propose a joint inversion framework, leveraging ideas from distributed compressive sensing and the modified Gauss-Newton method for full-waveform inversion, by using the shared information in the time-lapse data. Our results on a realistic synthetic example highlight the benefits of our joint inversion approach over a parallel inversion method that does not exploit the shared information. Preliminary results also indicate that our formulation can address time-lapse data with inconsistent acquisition geometries.}, keywords = {EAGE, time-lapse, FWI}, note = {(EAGE, Madrid)}, doi = {10.3997/2214-4609.201413086}, url = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2015/oghenekohwo2015EAGEuci/oghenekohwo2015EAGEuci.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2015/oghenekohwo2015EAGEuci/oghenekohwo2015EAGEuci_poster.pdf} } @PHDTHESIS{oghenekohwo2017THetl, author = {Felix Oghenekohwo}, title = {Economic time-lapse seismic acquisition and imaging---{Reaping} the benefits of randomized sampling with distributed compressive sensing}, school = {The University of British Columbia}, year = {2017}, month = {08}, address = {Vancouver}, abstract = {This thesis presents a novel viewpoint on the implicit opportunities randomized surveys bring to time-lapse seismic - which is a proven surveillance tool for hydrocarbon reservoir monitoring. 
Time-lapse (4D) seismic combines acquisition and processing of at least two seismic datasets (or vintages) in order to extract information related to changes in a reservoir within a specified time interval. The current paradigm places stringent requirements on replicating the 4D surveys, which is an expensive task often requiring uneconomical dense sampling of seismic wavefields. To mitigate the challenges of dense sampling, several advances in seismic acquisition have been made in recent years including the use of multiple sources firing at near simultaneous random times, and the adaptation of Compressive Sensing (CS) principles to design practical acquisition engines that improve sampling efficiency for seismic data acquisition. However, little is known regarding the implications of these developments for time-lapse studies. By conducting multiple experiments modelling surveys adhering to the principles of CS for 4D seismic, I propose a model that demonstrates the feasibility of randomized acquisitions for time-lapse seismic. The proposed joint recovery model (JRM), which derives from distributed CS, exploits the common information in time-lapse data during recovery of dense wavefields from measured subsampled data, providing highly repeatable and high-fidelity vintages. I show that we obtain better vintages when randomized surveys are not replicated, in contrast to standard practice, paving the way for an opportunity to relax the rigorous requirement to replicate surveys precisely. We assert that the vintages obtained using our proposed model are of sufficient quality to serve as inputs to processes that extract time-lapse attributes from which subsurface changes are deduced. Additionally, I show that recovery with the JRM is robust with respect to errors due to differences between actual and recorded postplot information. Finally, I present an opportunity to adapt our model to problems related to time-lapse seismic imaging where the main finding is that we can better delineate time-lapse changes by adapting the joint recovery model to wave-equation based inversion methods.}, keywords = {PhD, time lapse, acquisition, joint recovery, thesis, compressive sensing, distributed compressive sensing}, note = {(PhD)}, url = {https://slim.gatech.edu/Publications/Public/Thesis/2017/oghenekohwo2017THetl/oghenekohwo2017THetl.pdf}, presentation = {https://slim.gatech.edu/Publications/Public/Thesis/2017/oghenekohwo2017THetl/oghenekohwo2017THetl_pres.pdf} } @ARTICLE{kumar2015sss, author = {Rajiv Kumar and Haneet Wason and Felix J. Herrmann}, title = {Source separation for simultaneous towed-streamer marine acquisition --- a compressed sensing approach}, journal = {Geophysics}, year = {2015}, month = {11}, volume = {80}, number = {6}, pages = {WD73-WD88}, abstract = {Simultaneous marine acquisition is an economic way to sample seismic data and speed up acquisition, wherein single or multiple source vessels fire sources at near-simultaneous or slightly random times, resulting in overlapping shot records. The current paradigm for simultaneous towed-streamer marine acquisition incorporates “low variability” in source firing times, i.e., 0 ≤ 1 or 2 s because the sources and receivers are moving. This results in a low degree of randomness in simultaneous data, which is challenging to separate (into its constituent sources) using compressed-sensing-based separation techniques because randomization is key to successful recovery via compressed sensing. 
We have addressed the challenge of source separation for simultaneous towed-streamer acquisitions via two compressed-sensing-based approaches, i.e., sparsity promotion and rank minimization. We have evaluated the performance of the sparsity-promotion- and rank-minimization-based techniques by simulating two simultaneous towed-streamer acquisition scenarios, i.e., over/under and simultaneous long offset. A field data example from the Gulf of Suez for the over/under acquisition scenario was also developed. We observed that the proposed approaches gave good and comparable recovery qualities of the separated sources, but the rank-minimization technique outperformed the sparsity-promoting technique in terms of the computational time and memory. We also compared these two techniques with the normal-moveout-based median-filtering-type approach, which had comparable results.}, keywords = {acquisition, 2D, inversion, marine, source separation, optimization, sparsity, rank}, note = {(Geophysics)}, doi = {10.1190/geo2015-0108.1}, url = {https://slim.gatech.edu/Publications/Public/Journals/Geophysics/2015/kumar2015sss/kumar2015sss_revised.pdf}, url2 = {http://library.seg.org/doi/abs/10.1190/geo2015-0108.1} } @phdthesis{li2015weighted, title={A weighted $\ell_1$-minimization for distributed compressive sensing}, author={Li, Xiaowei}, year={2015}, school={University of British Columbia} } @ARTICLE{Mansour11TRssma, author = {Hassan Mansour and Haneet Wason and Tim T.Y. Lin and Felix J. Herrmann}, title = {Randomized marine acquisition with compressive sampling matrices}, journal = {Geophysical Prospecting}, year = {2012}, volume = {60}, pages = {648-662}, number = {4}, month = {07}, abstract = {Seismic data acquisition in marine environments is a costly process that calls for the adoption of simultaneous-source or randomized acquisition - an emerging technology that is stimulating both geophysical research and commercial efforts. Simultaneous marine acquisition calls for the development of a new set of design principles and post-processing tools. In this paper, we discuss the properties of a specific class of randomized simultaneous acquisition matrices and demonstrate that sparsity-promoting recovery improves the quality of reconstructed seismic data volumes. We propose a practical randomized marine acquisition scheme where the sequential sources fire airguns at only randomly time-dithered instances. We demonstrate that the recovery using sparse approximation from random time-dithering with a single source approaches the recovery from simultaneous-source acquisition with multiple sources. Established findings from the field of compressive sensing indicate that the choice of the sparsifying transform that is incoherent with the compressive sampling matrix can significantly impact the reconstruction quality. Leveraging these findings, we then demonstrate that the compressive sampling matrix resulting from our proposed sampling scheme is incoherent with the curvelet transform. The combined measurement matrix exhibits better isometry properties than other transform bases such as a non-localized multidimensional Fourier transform. 
We illustrate our results with simulations of "ideal" simultaneous-source marine acquisition, which dithers both in time and space, compared with periodic and randomized time-dithering.}, keywords = {curvelet transform, Fourier, marine acquisition}, doi = {10.1111/j.1365-2478.2012.01075.x}, url = {http://onlinelibrary.wiley.com/doi/10.1111/j.1365-2478.2012.01075.x/abstract}, url2 = {https://slim.gatech.edu/Publications/Public/Journals/GeophysicalProspecting/2012/Mansour11TRssma/Mansour11TRssma.pdf} } @CONFERENCE{kumar2016SEGtjm, author = {Rajiv Kumar and Shashin Sharan and Haneet Wason and Felix J. Herrmann}, title = {Time-jittered marine acquisition---a rank-minimization approach for {5D} source separation}, booktitle = {SEG Technical Program Expanded Abstracts}, year = {2016}, month = {10}, pages = {119-123}, abstract = {Simultaneous source marine acquisition has been recognized as an economic way of improving spatial sampling and speeding up acquisition time, where a single- (or multiple-) source vessel fires at jittered source locations and time instances. Consequently, the acquired simultaneous data volume is processed to separate the overlapping shot records, resulting in a densely sampled data volume. It has been shown in the past that the simultaneous source acquisition design and source separation process can be set up as a compressed sensing problem, where conventional seismic data is reconstructed from simultaneous data via a sparsity-promoting optimization formulation. While the recovery quality of separated data is reasonably good, the recovery process can be computationally expensive due to transform-domain redundancy. In this paper, we present a computationally tractable rank-minimization algorithm to separate simultaneous data volumes. The proposed algorithm is suitable for large-scale seismic data, since it avoids singular-value decompositions and uses a low-rank based factorized formulation instead.
Results are illustrated for simulations of simultaneous time-jittered continuous recording for a 3D ocean-bottom cable survey.}, keywords = {5D, marine, time-jittered acquisition, source separation, SEG}, note = {(SEG, Dallas)}, doi = {10.1190/segam2016-13878249.1}, url = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2016/kumar2016SEGtjm/kumar2016SEGtjm.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2016/kumar2016SEGtjm/kumar2016SEGtjm_pres.pdf} } @incollection{wei2018improve, title={Improve 4D seismic interpretability with joint sparsity recovery}, author={Wei, Lei and Tian, Yue and Li, Chang and Oppert, Shauna and Hennenfent, Gilles}, booktitle={SEG Technical Program Expanded Abstracts 2018}, pages={5338--5342}, year={2018}, publisher={Society of Exploration Geophysicists} } @incollection{kaur2020time, title={Time-lapse seismic data inversion for estimating reservoir parameters using deep learning}, author={Kaur, Harpreet and Sun, Alexander and Zhong, Zhi and Fomel, Sergey}, booktitle={SEG Technical Program Expanded Abstracts 2020}, pages={1720--1724}, year={2020}, publisher={Society of Exploration Geophysicists} } @article{lorenz2014linearized, title={The linearized Bregman method via split feasibility problems: Analysis and generalizations}, author={Lorenz, Dirk A and Schopfer, Frank and Wenger, Stephan}, journal={SIAM Journal on Imaging Sciences}, volume={7}, number={2}, pages={1237--1262}, year={2014}, publisher={SIAM} } @inproceedings{janiszewski2014improvements, title={Improvements in the Efficiency of Ocean Bottom Sensor Surveys through the Use of Multiple Independent Seismic Sources}, author={Janiszewski, F and Brewer, J and Mosher, C}, booktitle={EAGE Workshop on Land and Ocean Bottom-Broadband Full Azimuth Seismic Surveys}, volume={2014}, number={1}, pages={1--3}, year={2014}, organization={European Association of Geoscientists \& Engineers} } @ARTICLE{herrmann2008GJInps, author = {Felix J. Herrmann and Gilles Hennenfent}, title = {Non-parametric seismic data recovery with curvelet frames}, journal = {Geophysical Journal International}, year = {2008}, volume = {173}, pages = {233-248}, month = {04}, abstract = {Seismic data recovery from data with missing traces on otherwise regular acquisition grids forms a crucial step in the seismic processing flow. For instance, unsuccessful recovery leads to imaging artifacts and to erroneous predictions for the multiples, adversely affecting the performance of multiple elimination. A non-parametric transform-based recovery method is presented that exploits the compression of seismic data volumes by recently developed curvelet frames. The elements of this transform are multidimensional and directional and locally resemble wavefronts present in the data, which leads to a compressible representation for seismic data. This compression enables us to formulate a new curvelet-based seismic data recovery algorithm through sparsity-promoting inversion. The concept of sparsity-promoting inversion is in itself not new to geophysics. However, the recent insights from the field of {\textquoteleft}compressed sensing{\textquoteright} are new since they clearly identify the three main ingredients that go into a successful formulation of a recovery problem, namely a sparsifying transform, a sampling strategy that subdues coherent aliases and a sparsity-promoting program that recovers the largest entries of the curvelet-domain vector while explaining the measurements. 
These concepts are illustrated with a stylized experiment that stresses the importance of the degree of compression by the sparsifying transform. With these findings, a curvelet-based recovery algorithm is developed, which recovers seismic wavefields from seismic data volumes with large percentages of traces missing. During this construction, we benefit from the main three ingredients of compressive sampling, namely the curvelet compression of seismic data, the existence of a favorable sampling scheme and the formulation of a large-scale sparsity-promoting solver based on a cooling method. The recovery performs well on synthetic as well as real data and performs better by virtue of the sparsifying property of curvelets. Our results are applicable to other areas such as global seismology.}, keywords = {curvelet transform, reconstruction, SLIM, acquisition}, url = {https://slim.gatech.edu/Publications/Public/Journals/GeophysicalJournalInternational/2008/herrmann2008GJInps.pdf}, doi = {10.1111/j.1365-246X.2007.03698.x} } @CONFERENCE{oghenekohwo2015CSEGctl, author = {Felix Oghenekohwo and Felix J. Herrmann}, title = {Compressive time-lapse seismic data processing using shared information}, booktitle = {CSEG Annual Conference Proceedings}, year = {2015}, month = {05}, abstract = {Time-lapse images void of acquisition and processing artifacts can provide more useful information about subsurface changes compared to those with acquisition footprints and other unwanted anomalies. Although several pre-processing techniques are being developed and used to mitigate these unwanted artifacts, these operations can be very expensive, challenging and data dependent. Migration, as a processing tool, using a sparsity constraint has been shown to reduce artifacts drastically but little is known about the significance for compressed time-lapse seismic data. Leveraging ideas from distributed compressed sensing, and motivated by our earlier work on recovery of densely sampled time-lapse data from compressively sampled measurements, we present a sparsity-constrained migration for time-lapse data that uses a common component shared by the baseline and monitor data. Our algorithm tested on a synthetic example highlights the advantages of exploiting the common information, compared to ad hoc methods that involve parallel processing of the time-lapse data before differencing.}, keywords = {CSEG, time-lapse}, note = {(CSEG, Calgary)}, url = {https://slim.gatech.edu/Publications/Public/Conferences/CSEG/2015/oghenekohwo2015CSEGctl/oghenekohwo2015CSEGctl.pdf}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/CSEG/2015/oghenekohwo2015CSEGctl/oghenekohwo2015CSEGctl_pres.pdf} } @ARTICLE{herrmann2008ACHAsac, author = {Felix J. Herrmann and Peyman P. Moghaddam and Christiaan C. Stolk}, title = {Sparsity- and continuity-promoting seismic image recovery with curvelet frames}, journal = {Applied and Computational Harmonic Analysis}, year = {2008}, volume = {24}, pages = {150-173}, number = {2}, month = {03}, abstract = {A nonlinear singularity-preserving solution to seismic image recovery with sparseness and continuity constraints is proposed. We observe that curvelets, as a directional frame expansion, lead to sparsity of seismic images and exhibit invariance under the normal operator of the linearized imaging problem. Based on this observation we derive a method for stable recovery of the migration amplitudes from noisy data.
The method corrects the amplitudes during a post-processing step after migration, such that the main additional cost is one application of the normal operator, i.e. a modeling followed by a migration. Asymptotically this normal operator corresponds to a pseudodifferential operator, for which a convenient diagonal approximation in the curvelet domain is derived, including a bound for its error and a method for the estimation of the diagonal from a compound operator consisting of discrete implementations for the scattering operator and its adjoint the migration operator. The solution is formulated as a nonlinear optimization problem where sparsity in the curvelet domain as well as continuity along the imaged reflectors are jointly promoted. To enhance sparsity, the $\ell_1$-norm on the curvelet coefficients is minimized, while continuity is promoted by minimizing an anisotropic diffusion norm on the image. The performance of the recovery scheme is evaluated with a time-reversed {\textquoteleft}wave-equation{\textquoteright} migration code on synthetic datasets, including the complex SEG/EAGE AA salt model.}, keywords = {curvelet transform, imaging, SLIM, processing}, doi = {10.1016/j.acha.2007.06.007}, url = {https://slim.gatech.edu/Publications/Public/Journals/ACHA/2008/herrmann2008ACHAsac/herrmann2008ACHAsac.pdf} } @incollection{qu2017simultaneous, title={Simultaneous joint migration inversion for semicontinuous time-lapse seismic data}, author={Qu, Shan and Verschuur, Dirk}, booktitle={SEG Technical Program Expanded Abstracts 2017}, pages={5808--5813}, year={2017}, publisher={Society of Exploration Geophysicists} } @article{queisser2013full, title={Full waveform inversion in the time lapse mode applied to {CO2} storage at Sleipner}, author={Quei{\ss}er, Manuel and Singh, Satish C}, journal={Geophysical Prospecting}, volume={61}, number={3}, pages={537--555}, year={2013}, publisher={European Association of Geoscientists \& Engineers} } @article{yang2016time, title={Time-lapse full-waveform inversion with ocean-bottom-cable data: Application on Valhall field}, author={Yang, Di and Liu, Faqi and Morton, Scott and Malcolm, Alison and Fehler, Michael}, journal={Geophysics}, volume={81}, number={4}, pages={R225--R235}, year={2016}, publisher={Society of Exploration Geophysicists} } @article{maharramov2019integrated, title={Integrated kinematic time-lapse inversion workflow leveraging full-waveform inversion and machine learning}, author={Maharramov, Musa and Willemsen, Bram and Routh, Partha S and Peacock, Emily F and Froneberger, Mark and Robinson, Alana P and Bear, Glenn W and Lazaratos, Spyros K}, journal={The Leading Edge}, volume={38}, number={12}, pages={943--948}, year={2019}, publisher={Society of Exploration Geophysicists} } @article{zhang2013double, title={Double-difference elastic-waveform inversion with prior information for time-lapse monitoring}, author={Zhang, Zhigang and Huang, Lianjie}, journal={Geophysics}, volume={78}, number={6}, pages={R259--R273}, year={2013}, publisher={Society of Exploration Geophysicists} } @article{yang2015double, title={Double-difference waveform inversion: Feasibility and robustness study with pressure data}, author={Yang, Di and Meadows, Mark and Inderwiesen, Phil and Landa, Jorge and Malcolm, Alison and Fehler, Michael}, journal={Geophysics}, volume={80}, number={6}, pages={M129--M141}, year={2015}, publisher={Society of Exploration Geophysicists} } @article{thomsen1986weak, title={Weak elastic anisotropy}, author={Thomsen, Leon}, 
journal={Geophysics}, volume={51}, number={10}, pages={1954--1966}, year={1986}, publisher={Society of Exploration Geophysicists} } @article{baysal1983reverse, title={Reverse time migration}, author={Baysal, Edip and Kosloff, Dan D and Sherwood, John WC}, journal={Geophysics}, volume={48}, number={11}, pages={1514--1524}, year={1983}, publisher={Society of Exploration Geophysicists} } @ARTICLE{Li11TRfrfwi, author = {Xiang Li and Aleksandr Y. Aravkin and Tristan van Leeuwen and Felix J. Herrmann}, title = {Fast randomized full-waveform inversion with compressive sensing}, journal = {Geophysics}, year = {2012}, volume = {77}, pages = {A13-A17}, number = {3}, month = {05}, abstract = {Wave-equation based seismic inversion can be formulated as a nonlinear inverse problem where the medium properties are obtained via minimization of a least-squares misfit functional. The demand for higher resolution models in more geologically complex areas drives the need to develop techniques that explore the special structure of full-waveform inversion to reduce the computational burden and to regularize the inverse problem. We meet these goals by using ideas from compressive sensing and stochastic optimization to design a novel Gauss-Newton method, where the updates are computed from random subsets of the data via curvelet-domain sparsity promotion. Application of this idea to a realistic synthetic shows improved results compared to quasi-Newton methods, which require passes through all data. Two different subset sampling strategies are considered: randomized source encoding, and drawing sequential shots firing at random source locations from marine data with missing near and far offsets. In both cases, we obtain excellent inversion results compared to conventional methods at reduced computational costs.}, keywords = {SLIM, FWI, compressive sensing, optimization}, doi = {10.1190/geo2011-0410.1}, url = {https://slim.gatech.edu/Publications/Public/Journals/Geophysics/2012/Li11TRfrfwi/Li11TRfrfwi.pdf} } @ARTICLE{herrmann2010GEOPrsg, author = {Felix J. Herrmann}, title = {Randomized sampling and sparsity: getting more information from fewer samples}, journal = {Geophysics}, year = {2010}, volume = {75}, pages = {WB173-WB187}, number = {6}, month = {12}, abstract = {Many seismic exploration techniques rely on the collection of massive data volumes that are subsequently mined for information during processing. Although this approach has been extremely successful in the past, current efforts toward higher-resolution images in increasingly complicated regions of the earth continue to reveal fundamental shortcomings in our workflows. Chief among these is the so-called {\textquotedblleft}curse of dimensionality{\textquotedblright} exemplified by Nyquist{\textquoteright}s sampling criterion, which disproportionately strains current acquisition and processing systems as the size and desired resolution of our survey areas continue to increase. We offer an alternative sampling method leveraging recent insights from compressive sensing toward seismic acquisition and processing for data that are traditionally considered to be undersampled. The main outcome of this approach is a new technology where acquisition and processing related costs are no longer determined by overly stringent sampling criteria, such as Nyquist.
At the heart of our approach lies randomized incoherent sampling that breaks subsampling related interferences by turning them into harmless noise, which we subsequently remove by promoting transform-domain sparsity. Now, costs no longer grow significantly with resolution and dimensionality of the survey area, but instead depend only on transform-domain sparsity. Our contribution is twofold. First, we demonstrate by means of carefully designed numerical experiments that compressive sensing can successfully be adapted to seismic exploration. Second, we show that accurate recovery can be accomplished for compressively sampled data volumes with sizes that exceed the size of conventional transform-domain data volumes by only a small factor. Because compressive sensing combines transformation and encoding by a single linear encoding step, this technology is directly applicable to acquisition and to dimensionality reduction during processing. In either case, sampling, storage, and processing costs scale with transform-domain sparsity. We illustrate this principle by means of a number of case studies.}, keywords = {data acquisition, geophysical techniques, Nyquist criterion, sampling methods, seismology, SLIM, acquisition, compressive sensing, optimization}, doi = {10.1190/1.3506147}, publisher = {SEG}, url = {https://slim.gatech.edu/Publications/Public/Journals/Geophysics/2010/herrmann2010GEOPrsg/herrmann2010GEOPrsg.pdf} } @incollection{li2013joint, title={Joint source deblending and reconstruction for seismic data}, author={Li, Chengbo and Mosher, Charles C and Morley, Larry C and Ji, Yongchang and Brewer, Joel D}, booktitle={SEG Technical Program Expanded Abstracts 2013}, pages={82--87}, year={2013}, publisher={Society of Exploration Geophysicists} } @article{cao2019joint, title={Joint deblending and data reconstruction with focal transformation}, author={Cao, Junhai and Verschuur, Eric and Gu, Hanming and Li, Lie}, journal={Geophysics}, volume={84}, number={3}, pages={V219--V231}, year={2019}, publisher={Society of Exploration Geophysicists} } @incollection{xuan2020deblending, title={Deblending of OBN ultralong-offset simultaneous source acquisition}, author={Xuan, Yi and Malik, Raheel and Zhang, Zhigang and Guo, Manhong and Huang, Yi}, booktitle={SEG Technical Program Expanded Abstracts 2020}, pages={2764--2768}, year={2020}, publisher={Society of Exploration Geophysicists} } @incollection{tian2018joint, title={Joint sparsity recovery for noise attenuation}, author={Tian, Yue and Wei, Lei and Li, Chang and Oppert, Shauna and Hennenfent, Gilles}, booktitle={SEG Technical Program Expanded Abstracts 2018}, pages={4186--4190}, year={2018}, publisher={Society of Exploration Geophysicists} } @article{kragh2002seismic, title={Seismic repeatability, normalized rms, and predictability}, author={Kragh, ED and Christie, Phil}, journal={The Leading Edge}, volume={21}, number={7}, pages={640--647}, year={2002}, publisher={Society of Exploration Geophysicists} } @inproceedings{jones2012building, title={Building complex synthetic models to evaluate acquisition geometries and velocity inversion technologies}, author={Jones, CE and Edgar, JA and Selvage, JI and Crook, H}, booktitle={74th EAGE Conference and Exhibition incorporating EUROPEC 2012}, pages={cp--293}, year={2012}, organization={European Association of Geoscientists \& Engineers} } @conference {yin2021SEGcts, title = {Compressive time-lapse seismic monitoring of carbon storage and sequestration with the joint recovery model}, booktitle = {SEG Technical 
Program Expanded Abstracts}, year = {2021}, note = {(IMAGE, Denver)}, month = {09}, pages = {3434-3438}, abstract = {Time-lapse seismic monitoring of carbon storage and sequestration is often challenging because the time-lapse signature of the growth of CO2 plumes is weak in amplitude and therefore difficult to detect seismically. This situation is compounded by the fact that the surveys are often coarsely sampled and not replicated to reduce costs. As a result, images obtained for different vintages (baseline and monitor surveys) often contain artifacts that may be attributed wrongly to time-lapse changes. To address these issues, we propose to invert the baseline and monitor surveys jointly. By using the joint recovery model, we exploit information shared between multiple time-lapse surveys. Contrary to other time-lapse methods, our approach does not rely on replicating the surveys to detect time-lapse changes. To illustrate this advantage, we present a numerical sensitivity study where CO2 is injected in a realistic synthetic model. This model is representative of the geology in the southeast of the North Sea, an area currently considered for carbon sequestration. Our example demonstrates that the joint recovery model improves the quality of time-lapse images allowing us to monitor the CO2 plume seismically.}, keywords = {CCS, Compressive Sensing, Imaging, JRM, marine, SEG, time-lapse}, doi = {10.1190/segam2021-3569087.1}, url = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2021/yin2021SEGcts/yin2021SEGcts.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2021/yin2021SEGcts/Tue-9-28-Yin.html}, url2 = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2021/yin2021SEGcts/yin2021SEGcts.mp4}, software = {https://github.com/slimgroup/Software.SEG2021}, author = {Ziyi Yin and Mathias Louboutin and Felix J. 
Herrmann} } @phdthesis{valenciano2008imaging, title={Imaging by wave-equation inversion}, author={Valenciano, Alejandro A}, year={2008}, school={Citeseer} } @article{zeng2014least, title={Least-squares reverse time migration: Inversion-based imaging toward true reflectivity}, author={Zeng, Chong and Dong, Shuqian and Wang, Bin}, journal={The Leading Edge}, volume={33}, number={9}, pages={962--968}, year={2014}, publisher={Society of Exploration Geophysicists} } @article{candes2006fast, title={Fast discrete curvelet transforms}, author={Candes, Emmanuel and Demanet, Laurent and Donoho, David and Ying, Lexing}, journal={multiscale modeling \& simulation}, volume={5}, number={3}, pages={861--899}, year={2006}, publisher={SIAM} } @article{cai2009linearized, title={Linearized Bregman iterations for compressed sensing}, author={Cai, Jian-Feng and Osher, Stanley and Shen, Zuowei}, journal={Mathematics of computation}, volume={78}, number={267}, pages={1515--1536}, year={2009} } @article{lumley2001time, title={Time-lapse seismic reservoir monitoring}, author={Lumley, David E}, journal={Geophysics}, volume={66}, number={1}, pages={50--53}, year={2001}, publisher={Society of Exploration Geophysicists} } @article{zhou2021non, title={Non-repeatability Effects on Time-Lapse 4D Seismic Full Waveform Inversion for Ocean-Bottom Node Data}, author={Zhou, Wei and Lumley, David}, journal={Geophysics}, volume={86}, number={4}, pages={1--60}, year={2021}, publisher={Society of Exploration Geophysicists} } @article{zhou2021central, title={Central-difference time-lapse 4D seismic full-waveform inversion}, author={Zhou, Wei and Lumley, David}, journal={Geophysics}, volume={86}, number={2}, pages={R161--R172}, year={2021}, publisher={Society of Exploration Geophysicists} } @article{oghenekohwo2017highly, title={Highly repeatable time-lapse seismic with distributed compressive sensing—Mitigating effects of calibration errors}, author={Oghenekohwo, Felix and Herrmann, Felix J}, journal={The Leading Edge}, volume={36}, number={8}, pages={688--694}, year={2017}, publisher={Society of Exploration Geophysicists} } @article{baron2009distributed, title={Distributed compressive sensing}, author={Baron, Dror and Duarte, Marco F and Wakin, Michael B and Sarvotham, Shriram and Baraniuk, Richard G}, journal={arXiv preprint arXiv:0901.3403}, year={2009} } @article{sharan2019fast, title={Fast sparsity-promoting microseismic source estimation}, author={Sharan, Shashin and Wang, Rongrong and Herrmann, Felix J}, journal={Geophysical Journal International}, volume={216}, number={1}, pages={164--181}, year={2019}, publisher={Oxford University Press} } @inproceedings{mansour2012support, title={Support driven reweighted $\ell_1$ minimization}, author={Mansour, Hassan and Yilmaz, {\"O}zg{\"u}r}, booktitle={2012 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)}, pages={3309--3312}, year={2012}, organization={IEEE} } @article{herrmann2008sparsity, title={Sparsity-and continuity-promoting seismic image recovery with curvelet frames}, author={Herrmann, Felix J and Moghaddam, Peyman and Stolk, Christiaan C}, journal={Applied and Computational Harmonic Analysis}, volume={24}, number={2}, pages={150--173}, year={2008}, publisher={Elsevier} } @article{kowalski2009sparsity, title={Sparsity and persistence: mixed norms provide simple signal models with dependent coefficients}, author={Kowalski, Matthieu and Torr{\'e}sani, Bruno}, journal={Signal, image and video processing}, volume={3}, number={3}, 
pages={251--264}, year={2009}, publisher={Springer} } @inproceedings{witte2017sparsity, title={Sparsity-promoting least-squares migration with the linearized inverse scattering imaging condition}, author={Witte, Philipp A and Yang, Mengmeng and Herrmann, Felix J}, booktitle={79th EAGE Conference and Exhibition 2017}, volume={2017}, number={1}, pages={1--5}, year={2017}, organization={European Association of Geoscientists \& Engineers} } @conference {louboutin2021SEGulm, title = {Ultra-low memory seismic inversion with randomized trace estimation}, booktitle = {SEG Technical Program Expanded Abstracts}, year = {2021}, note = {(IMAGE, Denver)}, month = {09}, pages = {787-791}, abstract = {Inspired by recent work on extended image volumes that lays the ground for randomized probing of extremely large seismic wavefield matrices, we present a memory frugal and computationally efficient inversion methodology that uses techniques from randomized linear algebra. By means of a carefully selected realistic synthetic example, we demonstrate that we are capable of achieving competitive inversion results at a fraction of the memory cost of conventional full-waveform inversion with limited computational overhead. By exchanging memory for negligible computational overhead, we open with the presented technology the door towards the use of low-memory accelerators such as GPUs.}, keywords = {FWI, HPC, inversion, randomized linear algebra, SEG}, doi = {10.1190/segam2021-3584072.1}, url = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2021/louboutin2021SEGulm/louboutinp.html}, url2 = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2021/louboutin2021SEGulm/louboutin2021SEGulm.mp4}, software = {https://github.com/slimgroup/TimeProbeSeismic.jl}, author = {Mathias Louboutin and Felix J. 
Herrmann} } @article{friedlander2011recovering, title={Recovering compressively sampled signals using partial support information}, author={Friedlander, Michael P and Mansour, Hassan and Saab, Rayan and Yilmaz, {\"O}zg{\"u}r}, journal={IEEE Transactions on Information Theory}, volume={58}, number={2}, pages={1122--1134}, year={2011}, publisher={IEEE} } @article{huang2020towards, title={Towards real-time monitoring: data assimilated time-lapse full waveform inversion for seismic velocity and uncertainty estimation}, author={Huang, Chao and Zhu, Tieyuan}, journal={Geophysical Journal International}, volume={223}, number={2}, pages={811--824}, year={2020}, publisher={Oxford University Press} } @article{li2014kalman, title={A Kalman filter powered by $\mathcal{H}^2$-matrices for quasi-continuous data assimilation problems}, author={Li, Judith Yue and Ambikasaran, Sivaram and Darve, Eric F and Kitanidis, Peter K}, journal={Water Resources Research}, volume={50}, number={5}, pages={3734--3749}, year={2014}, publisher={Wiley Online Library} } @article{ringrose2013salah, title={The In Salah {CO2} storage project: lessons learned and knowledge transfer}, author={Ringrose, PS and Mathieson, AS and Wright, IW and Selama, F and Hansen, O and Bissell, R and Saoula, N and Midgley, J}, journal={Energy Procedia}, volume={37}, pages={6226--6236}, year={2013}, publisher={Elsevier} } @conference {oghenekohwo2014SEGrsw, title = {Randomized sampling without repetition in time-lapse surveys}, booktitle = {SEG Technical Program Expanded Abstracts}, year = {2014}, note = {(SEG)}, month = {10}, pages = {4848-4852}, abstract = {Vouching for higher levels of repeatability in acquisition and processing of time-lapse (4D) seismic data has become the standard with oil and gas contractor companies, with significant investment in the design of acquisition systems and processing algorithms that attempt to address some of the current 4D challenges, in particular, imaging weak 4D signals. Recent developments from the field of compressive sensing have shown the benefits of variants of randomized sampling in marine seismic acquisition and its impact for the future of seismic exploration. Following these developments, we show that the requirement for accurate survey repetition in time-lapse seismic data acquisition can be waived provided we solve a sparsity-promoting convex optimization program that makes use of the shared component between the baseline and monitor data. By setting up a framework for inversion of the stacked sections of time-lapse data, given the pre-stack data volumes, we are able to extract 4D signals with relatively high fidelity from significant subsamplings. Our formulation is applied to time-lapse data that has been acquired with different source/receiver geometries, paving the way for an efficient approach to dealing with time-lapse data acquired with initially poor repeatability levels, provided the survey geometry details are known afterwards.}, keywords = {4D, Acquisition, random, repetition, SEG, time-lapse}, doi = {http://dx.doi.org/10.1190/segam2014-1627.1}, url = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2014/oghenekohwo2014SEGrsw/oghenekohwo2014SEGrsw.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2014/oghenekohwo2014SEGrsw/oghenekohwo2014SEGrsw_pres.pdf}, author = {Felix Oghenekohwo and Rajiv Kumar and Felix J.
Herrmann} } @incollection{watanabe2004differential, title={Differential waveform tomography for time-lapse crosswell seismic data with application to gas hydrate production monitoring}, author={Watanabe, Toshiki and Shimizu, Shoshiro and Asakawa, Eiichi and Matsuoka, Toshifumi}, booktitle={SEG Technical Program Expanded Abstracts 2004}, pages={2323--2326}, year={2004}, publisher={Society of Exploration Geophysicists} } @incollection{denli2009double, title={Double-difference elastic waveform tomography in the time domain}, author={Denli, Huseyin and Huang, Lianjie}, booktitle={SEG Technical Program Expanded Abstracts 2009}, pages={2302--2306}, year={2009}, publisher={Society of Exploration Geophysicists} } @article{eikrem2019iterated, title={Iterated extended Kalman filter method for time-lapse seismic full-waveform inversion}, author={Eikrem, Kjersti Solberg and N{\ae}vdal, Geir and Jakobsen, Morten}, journal={Geophysical Prospecting}, volume={67}, number={2}, pages={379--394}, year={2019}, publisher={European Association of Geoscientists \& Engineers} } @article{kalman1960new, abstract = {{The classical filtering and prediction problem is re-examined using the Bode-Shannon representation of random processes and the ``state-transition'' method of analysis of dynamic systems. New results are: (1) The formulation and methods of solution of the problem apply without modification to stationary and nonstationary statistics and to growing-memory and infinite-memory filters. (2) A nonlinear difference (or differential) equation is derived for the covariance matrix of the optimal estimation error. From the solution of this equation the co-efficients of the difference (or differential) equation of the optimal linear filter are obtained without further calculations. (3) The filtering problem is shown to be the dual of the noise-free regulator problem. The new method developed here is applied to two well-known problems, confirming and extending earlier results. The discussion is largely self-contained and proceeds from first principles; basic concepts of the theory of random processes are reviewed in the Appendix.}}, author = {Kalman, R. E.}, doi = {10.1115/1.3662552}, eprint = {https://asmedigitalcollection.asme.org/fluidsengineering/article-pdf/82/1/35/5518977/35\_1.pdf}, issn = {0021-9223}, journal = {Journal of Basic Engineering}, month = {03}, number = {1}, pages = {35-45}, title = {{A New Approach to Linear Filtering and Prediction Problems}}, url = {https://doi.org/10.1115/1.3662552}, volume = {82}, year = {1960}, bdsk-url-1 = {https://doi.org/10.1115/1.3662552}} @article{herrmann11GPelsqIm, title = {Efficient least-squares imaging with sparsity promotion and compressive sensing}, journal = {Geophysical Prospecting}, volume = {60}, number = {4}, year = {2012}, month = {07}, pages = {696-712}, address = {University of British Columbia, Vancouver}, abstract = {Seismic imaging is a linearized inversion problem relying on the minimization of a least-squares misfit functional as a function of the medium perturbation. The success of this procedure hinges on our ability to handle large systems of equations{\textendash}-whose size grows exponentially with the demand for higher resolution images in more and more complicated areas{\textendash}-and our ability to invert these systems given a limited amount of computational resources. 
To overcome this "curse of dimensionality" in problem size and computational complexity, we propose a combination of randomized dimensionality-reduction and divide-and-conquer techniques. This approach allows us to take advantage of sophisticated sparsity-promoting solvers that work on a series of smaller subproblems each involving a small randomized subset of data. These subsets correspond to artificial simultaneous-source experiments made of random superpositions of sequential-source experiments. By changing these subsets after each subproblem is solved, we are able to attain an inversion quality that is competitive while requiring fewer computational, and possibly, fewer acquisition resources. Application of this concept to a controlled series of experiments showed the validity of our approach and the relationship between its efficiency{\textendash}-by reducing the number of sources and hence the number of wave-equation solves{\textendash}-and the image quality. Application of our dimensionality-reduction methodology with sparsity promotion to a complicated synthetic with well-log constrained structure also yields excellent results underlining the importance of sparsity promotion.}, keywords = {Compressive Sensing, Imaging, Optimization, SLIM}, doi = {10.1111/j.1365-2478.2011.01041.x}, url = {https://slim.gatech.edu/Publications/Public/Journals/GeophysicalProspecting/2012/herrmann11GPelsqIm/herrmann11GPelsqIm.pdf}, url2 = {http://onlinelibrary.wiley.com/doi/10.1111/j.1365-2478.2011.01041.x/full}, author = {Felix J. Herrmann and Xiang Li} } @conference {tu2013EAGElsm, title = {Fast least-squares migration with multiples and source estimation}, booktitle = {EAGE Annual Conference Proceedings}, year = {2013}, month = {06}, abstract = {The advent of modern computing has made it possible to do seismic imaging using least-squares reverse-time migration. We obtain superior images by solving an optimization problem that recovers the true-amplitude images. However, its success hinges on overcoming several issues, including overwhelming problem size, unknown source wavelet, and interfering coherent events like multiples. In this abstract, we reduce the problem size by using ideas from compressive sensing, and estimate source wavelet by generalized variable projection. We also demonstrate how to invert for subsurface information encoded in surface-related multiples by incorporating the free-surface operator as an areal source in reverse-time migration. Our synthetic examples show that multiples help to improve the resolution of the image, as well as remove the amplitude ambiguity in wavelet estimation.}, keywords = {EAGE, Imaging, multiples, source estimation, sparse}, doi = {10.3997/2214-4609.20130727}, url = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2013/tu2013EAGElsm/tu2013EAGElsm.pdf}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2013/tu2013EAGElsm/tu2013EAGElsm_pres.pdf}, author = {Ning Tu and Aleksandr Y. Aravkin and Tristan van Leeuwen and Felix J. Herrmann} } @article {tu2014fis, title = {Fast imaging with surface-related multiples by sparse inversion}, journal = {Geophysical Journal International}, volume = {201}, number = {1}, year = {2015}, note = {(Geophysical Journal International)}, month = {04}, pages = {304-317}, abstract = {In marine exploration seismology, surface-related multiples are usually treated as noise mainly because subsequent processing steps, such as migration velocity analysis and imaging, require multiple-free data. 
Failure to remove these wavefield components from the data may lead to erroneous estimates for migration velocity or result in strong coherent artefacts that interfere with the imaged reflectors. However, multiples can carry complementary information compared to primaries, as they interact with the free surface and are therefore exposed more to the subsurface. Recent work has shown that when processed correctly, multiples can improve seismic illumination. Given a sufficiently accurate background velocity model and an estimate for the source signature, we propose a new and computationally efficient linearized inversion procedure based on two-way wave equations, which produces accurate images of the subsurface from the total upgoing wavefield including surface-related multiples. Modelling of the surface-related multiples in the proposed method derives from the well-known surface-related multiple elimination method. We incur a minimal overhead from incorporating the multiples by having the wave-equation solver carry out the multiple predictions via the inclusion of an areal source instead of expensive dense matrix-matrix multiplications. By using subsampling techniques, we obtain high-quality true-amplitude least-squares migrated images at computational costs of roughly a single reverse-time migration (RTM) with all the data. These images are virtually free of coherent artefacts from multiples. Proper inversion of the multiples would be computationally infeasible without using these techniques that significantly bring down the cost. By promoting sparsity in the curvelet domain and using rerandomization, our method gains improved robustness to errors in the background velocity model, and errors incurred in the linearization of the wave equation with respect to the model. We demonstrate the superior performance of the proposed method compared to the conventional RTM using realistic synthetic examples.}, keywords = {approximate message passing, Compressive Sensing, curvelet, inversion, Kaczmarz, multiples}, url = {https://slim.gatech.edu/Publications/Public/Journals/GeophysicalJournalInternational/2014/tu2014fis/tu2014fis.pdf}, url2 = {http://gji.oxfordjournals.org/cgi/content/full/ggv020?ijkey=pKzrSXbtxjlOjzd\&keytype=ref}, author = {Ning Tu and Felix J. Herrmann} } @conference {herrmann2015EAGEfom, title = {Fast "online" migration with Compressive Sensing}, booktitle = {EAGE Annual Conference Proceedings}, year = {2015}, note = {(EAGE, Madrid)}, month = {06}, abstract = {We present a novel adaptation of a recently developed relatively simple iterative algorithm to solve large-scale sparsity-promoting optimization problems. Our algorithm is particularly suitable to large-scale geophysical inversion problems, such as sparse least-squares reverse-time migration or Kirchhoff migration since it allows for a tradeoff between parallel computations, memory allocation, and turnaround times, by working on subsets of the data with different sizes.
Comparison of the proposed method for sparse least-squares imaging shows a performance that rivals and even exceeds the performance of state-of-the-art one-norm solvers that are able to carry out least-squares migration at the cost of a single migration with all data.}, keywords = {EAGE, LSRTM}, doi = {10.3997/2214-4609.201412942}, url = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2015/herrmann2015EAGEfom/herrmann2015EAGEfom.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2015/herrmann2015EAGEfom/herrmann2015EAGEfom_pres.pdf}, author = {Felix J. Herrmann and Ning Tu and Ernie Esser} } @article{douma2010connection, title={On the connection between artifact filtering in reverse-time migration and adjoint tomography}, author={Douma, Huub and Yingst, David and Vasconcelos, Ivan and Tromp, Jeroen}, journal={Geophysics}, volume={75}, number={6}, pages={S219--S223}, year={2010}, publisher={Society of Exploration Geophysicists} } @article {louboutin2017fwi, title = {Full-Waveform Inversion - Part 1: forward modeling}, journal = {The Leading Edge}, volume = {36}, number = {12}, year = {2017}, note = {(The Leading Edge)}, month = {12}, pages = {1033-1036}, abstract = {Since its re-introduction by Pratt (1999), full-waveform inversion (FWI) has gained a lot of attention in geophysical exploration because of its ability to build high resolution velocity models more or less automatically in areas of complex geology. While there is an extensive and growing literature on the topic, publications focus mostly on technical aspects, making this topic inaccessible for a broader audience due to the lack of simple introductory resources for newcomers to geophysics. We will accomplish this by providing a hands-on walkthrough of FWI using Devito (Lange et al. 2016), a system based on domain-specific languages that automatically generates code for time-domain finite differences.}, keywords = {devito, finite-differences, FWI, Modeling, tutorial}, doi = {10.1190/tle36121033.1}, url = {https://slim.gatech.edu/Publications/Public/Journals/TheLeadingEdge/2017/louboutin2017fwi/louboutin2017fwi.html}, author = {Mathias Louboutin and Philipp A. Witte and Michael Lange and Navjot Kukreja and Fabio Luporini and Gerard Gorman and Felix J. Herrmann} } @article {louboutin2017fwip2, title = {Full-Waveform Inversion - Part 2: adjoint modeling}, journal = {The Leading Edge}, volume = {37}, number = {1}, year = {2018}, note = {(The Leading Edge)}, month = {1}, pages = {69-72}, abstract = {This tutorial is the second part of a three-part tutorial series on full-waveform inversion (FWI), in which we provide a step-by-step walkthrough of setting up forward and adjoint wave equation solvers and an optimization framework for inversion. In part 1 (Louboutin et al., 2017), we demonstrated how to discretize the acoustic wave equation and how to set up a basic forward modeling scheme using Devito, a domain-specific language (DSL) in Python for automated finite-difference (FD) computations (Lange et al., 2016). Devito allows us to define wave equations as symbolic Python expressions (Meurer et al., 2017), from which optimized FD stencil code is automatically generated at run time.
In part 1, we show how we can use Devito to set up and solve acoustic wave equations with (impulsive) seismic sources and sample wavefields at the receiver locations to model shot records.}, keywords = {acoustic, devito, finite-difference, FWI, tutorial}, doi = {10.1190/tle37010069.1}, url = {https://slim.gatech.edu/Publications/Public/Journals/TheLeadingEdge/2018/louboutin2017fwip2/louboutin2017fwip2.html}, author = {Mathias Louboutin and Philipp A. Witte and Michael Lange and Navjot Kukreja and Fabio Luporini and Gerard Gorman and Felix J. Herrmann} } @article {witte2018fwip3, title = {Full-Waveform Inversion - Part 3: optimization}, journal = {The Leading Edge}, volume = {37}, number = {2}, year = {2018}, note = {(The Leading Edge)}, month = {1}, pages = {142-145}, abstract = {This tutorial is the third part of a full-waveform inversion (FWI) tutorial series with a step-by-step walkthrough of setting up forward and adjoint wave equations and building a basic FWI inversion framework. For discretizing and solving wave equations, we use Devito, a Python-based domain-specific language for automated generation of finite-difference code (Lange et al., 2016). The first two parts of this tutorial (Louboutin et al., 2017, 2018) demonstrated how to solve the acoustic wave equation for modeling seismic shot records and how to compute the gradient of the FWI objective function using the adjoint-state method. With these two key ingredients, we will now build an inversion framework that can be used to minimize the FWI least-squares objective function.}, keywords = {devito, finite-differences, FWI, inversion, Modeling, tutorial}, doi = {10.1190/tle37020142.1}, url = {https://slim.gatech.edu/Publications/Public/Journals/TheLeadingEdge/2018/witte2018fwip3/witte2018fwip3.html}, author = {Philipp A. Witte and Mathias Louboutin and Keegan Lensink and Michael Lange and Navjot Kukreja and Fabio Luporini and Gerard Gorman and Felix J. 
Herrmann} } @article{chadwick2010quantitative, title={Quantitative analysis of time-lapse seismic monitoring data at the Sleipner {CO2} storage operation}, author={Chadwick, Andy and Williams, Gareth and Delepine, Nicolas and Clochard, Vincent and Labat, Karine and Sturton, Susan and Buddensiek, Maike-L and Dillen, Menno and Nickel, Michael and Lima, Anne Louise and others}, journal={The Leading Edge}, volume={29}, number={2}, pages={170--177}, year={2010}, publisher={Society of Exploration Geophysicists} } @incollection{eiken2000seismic, title={Seismic monitoring of {CO2} injected into a marine aquifer}, author={Eiken, Ola and Brevik, I and Arts, R and Lindeberg, E and Fagervik, K}, booktitle={SEG Technical Program Expanded Abstracts 2000}, pages={1623--1626}, year={2000}, publisher={Society of Exploration Geophysicists} } @article{duncan1972linear, title={Linear dynamic recursive estimation from the viewpoint of regression analysis}, author={Duncan, David B and Horn, Susan D}, journal={Journal of the American Statistical Association}, volume={67}, number={340}, pages={815--821}, year={1972}, publisher={Taylor \& Francis} } @misc{henryk_modzelewski_2021_5495173, author = {Henryk Modzelewski and Mathias Louboutin and Rafael Orozco and Ziyi (Francis) Yin}, title = {slimgroup/JOLI.jl: v0.7.15}, month = sep, year = 2021, publisher = {Zenodo}, version = {v0.7.15}, doi = {10.5281/zenodo.5495173}, url = {https://doi.org/10.5281/zenodo.5495173} } @article{bezanson2017julia, title={Julia: A fresh approach to numerical computing}, author={Bezanson, Jeff and Edelman, Alan and Karpinski, Stefan and Shah, Viral B}, journal={SIAM Review}, volume={59}, number={1}, pages={65--98}, year={2017}, publisher={SIAM} } @software{mathias_louboutin_2022_7086719, author = {Mathias Louboutin and Philipp Witte and Ziyi Yin and Henryk Modzelewski and Carlos da Costa and Kerim and Peterson Nogueira}, title = {slimgroup/JUDI.jl: v3.1.9}, month = sep, year = 2022, publisher = {Zenodo}, version = {v3.1.9}, doi = {10.5281/zenodo.7086719}, url = {https://doi.org/10.5281/zenodo.7086719} } @misc{philipp_a_witte_2020_4301017, author = {Philipp A. Witte and Mathias Louboutin and Felix J.
Herrmann}, title = {JUDI4Flux: Seismic modeling for deep learning}, month = dec, year = 2020, publisher = {Zenodo}, version = {v0.1.1}, doi = {10.5281/zenodo.4301017}, url = {https://doi.org/10.5281/zenodo.4301017} } @misc{kailaix_2021_5528428, author = {Dongzhuo Li and Kailai Xu}, title = {lidongzh/FwiFlow.jl: v0.3.1}, month = sep, year = 2021, publisher = {Zenodo}, version = {v0.3.1}, doi = {10.5281/zenodo.5528428}, url = {https://doi.org/10.5281/zenodo.5528428} } @article{bezanson2012julia, title={Julia: A fast dynamic language for technical computing}, author={Bezanson, Jeff and Karpinski, Stefan and Shah, Viral B and Edelman, Alan}, journal={arXiv preprint arXiv:1209.5145}, year={2012} } @software{fabio_luporini_2022_6958070, author = {Fabio Luporini and Mathias Louboutin and Michael Lange and Navjot Kukreja and rhodrin and George Bisbas and Vincenzo Pandolfo and Lucas Cavalcante and Tim Burgess and Gerard Gorman and Ken Hester}, title = {devitocodes/devito: v4.7.1}, month = aug, year = 2022, publisher = {Zenodo}, version = {v4.7.1}, doi = {10.5281/zenodo.6958070}, url = {https://doi.org/10.5281/zenodo.6958070} } @article{xu2020adcme, title={ADCME: Learning spatially-varying physical fields using deep neural networks}, author={Xu, Kailai and Darve, Eric}, journal={arXiv preprint arXiv:2011.11955}, year={2020} } @article{paszke2017automatic, title={Automatic differentiation in pytorch}, author={Paszke, Adam and Gross, Sam and Chintala, Soumith and Chanan, Gregory and Yang, Edward and DeVito, Zachary and Lin, Zeming and Desmaison, Alban and Antiga, Luca and Lerer, Adam}, year={2017} } @article{abadi2016tensorflow, title={Tensorflow: Large-scale machine learning on heterogeneous distributed systems}, author={Abadi, Mart{\'\i}n and Agarwal, Ashish and Barham, Paul and Brevdo, Eugene and Chen, Zhifeng and Citro, Craig and Corrado, Greg S and Davis, Andy and Dean, Jeffrey and Devin, Matthieu and others}, journal={arXiv preprint arXiv:1603.04467}, year={2016} } @article{innes2018flux, title={Flux: Elegant machine learning with Julia}, author={Innes, Mike}, journal={Journal of Open Source Software}, volume={3}, number={25}, pages={602}, year={2018} } @book{mackay2003information, title={Information theory, inference and learning algorithms}, author={MacKay, David JC and Mac Kay, David JC and others}, year={2003}, publisher={Cambridge university press} } @article{stanimirovic2010accelerated, title={Accelerated gradient descent methods with line search}, author={Stanimirovi{\'c}, Predrag S and Miladinovi{\'c}, Marko B}, journal={Numerical Algorithms}, volume={54}, number={4}, pages={503--520}, year={2010}, publisher={Springer} } @article{peters2019algorithms, title={Algorithms and software for projections onto intersections of convex and non-convex sets with applications to inverse problems}, author={Peters, Bas and Herrmann, Felix J}, journal={arXiv preprint arXiv:1902.09699}, year={2019} } @misc{bas_peters_2021_5203700, author = {Bas Peters and Mathias Louboutin and Henryk Modzelewski}, title = {slimgroup/SetIntersectionProjection.jl: v0.2.1}, month = aug, year = 2021, publisher = {Zenodo}, version = {v0.2.1}, doi = {10.5281/zenodo.5203700}, url = {https://doi.org/10.5281/zenodo.5203700} } @article{kingma2014adam, title={Adam: A method for stochastic optimization}, author={Kingma, Diederik P and Ba, Jimmy}, journal={arXiv preprint arXiv:1412.6980}, year={2014} } @article{plessix2006review, title={A review of the adjoint-state method for computing the gradient of a functional with geophysical 
applications}, author={Plessix, R-E}, journal={Geophysical Journal International}, volume={167}, number={2}, pages={495--503}, year={2006}, publisher={Oxford University Press} } @article{oliver2011recent, title={Recent progress on reservoir history matching: a review}, author={Oliver, Dean S and Chen, Yan}, journal={Computational Geosciences}, volume={15}, number={1}, pages={185--221}, year={2011}, publisher={Springer} } @article{liu1989limited, title={On the limited memory BFGS method for large scale optimization}, author={Liu, Dong C and Nocedal, Jorge}, journal={Mathematical programming}, volume={45}, number={1}, pages={503--528}, year={1989}, publisher={Springer} } @article{li2021physics, title={Physics-informed neural operator for learning partial differential equations}, author={Li, Zongyi and Zheng, Hongkai and Kovachki, Nikola and Jin, David and Chen, Haoxuan and Liu, Burigede and Azizzadenesheli, Kamyar and Anandkumar, Anima}, journal={arXiv preprint arXiv:2111.03794}, year={2021} } @article{rasmussen2021open, title={The open porous media flow reservoir simulator}, author={Rasmussen, Atgeirr Fl{\o} and Sandve, Tor Harald and Bao, Kai and Lauser, Andreas and Hove, Joakim and Skaflestad, B{\aa}rd and Kl{\"o}fkorn, Robert and Blatt, Markus and Rustad, Alf Birger and S{\ae}vareid, Ove and others}, journal={Computers \& Mathematics with Applications}, volume={81}, pages={159--185}, year={2021}, publisher={Elsevier} } @techreport{settgast2018geosx, title={Geosx simulation framework}, author={Settgast, Randolph R and White, JA and Corbett, BC and Vargas, A and Sherman, C and Fu, P and Annavarapu, C and others}, year={2018}, institution={Lawrence Livermore National Lab.(LLNL), Livermore, CA (United States)} } @book{berz1996computational, title={Computational differentiation: techniques, applications, and tools}, author={Berz, Martin}, number={89}, year={1996}, publisher={Society for Industrial \& Applied} } @article{wen2021u, title={{U-FNO}--an enhanced Fourier neural operator based-deep learning model for multiphase flow}, author={Wen, Gege and Li, Zongyi and Azizzadenesheli, Kamyar and Anandkumar, Anima and Benson, Sally M}, journal={arXiv preprint arXiv:2109.03697}, year={2021} } @article{asnaashari2015time, title={Time-lapse seismic imaging using regularized full-waveform inversion with a prior model: which strategy?}, author={Asnaashari, Amir and Brossier, Romain and Garambois, St{\'e}phane and Audebert, Fran{\c{c}}ois and Thore, Pierre and Virieux, Jean}, journal={Geophysical prospecting}, volume={63}, number={1}, pages={78--98}, year={2015}, publisher={European Association of Geoscientists \& Engineers} } @inproceedings{appleyard1983nested, title={Nested factorization}, author={Appleyard, JR}, booktitle={SPE Reservoir Simulation Symposium}, year={1983}, organization={OnePetro} } @conference {siahkoohi2020SEGuqi, title = {Uncertainty quantification in imaging and automatic horizon tracking{\textemdash}a Bayesian deep-prior based approach}, booktitle = {SEG Technical Program Expanded Abstracts}, year = {2020}, note = {(SEG, virtual)}, month = {09}, pages = {1636-1640}, abstract = {In inverse problems, uncertainty quantification (UQ) deals with a probabilistic description of the solution nonuniqueness and data noise sensitivity. Setting seismic imaging into a Bayesian framework allows for a principled way of studying uncertainty by solving for the model posterior distribution. 
Imaging, however, typically constitutes only the first stage of a sequential workflow, and UQ becomes even more relevant when applied to subsequent tasks that are highly sensitive to the inversion outcome. In this paper, we focus on how UQ trickles down to horizon tracking for the determination of stratigraphic models and investigate its sensitivity with respect to the imaging result. As such, the main contribution of this work consists in a data-guided approach to horizon tracking uncertainty analysis. This work is fundamentally based on a special reparameterization of reflectivity, known as "deep prior". Feasible models are restricted to the output of a convolutional neural network with a fixed input, while weights and biases are Gaussian random variables. Given a deep prior model, the network parameters are sampled from the posterior distribution via a Markov chain Monte Carlo method, from which the conditional mean and point-wise standard deviation of the inferred reflectivities are approximated. For each sample of the posterior distribution, a reflectivity is generated, and the horizons are tracked automatically. In this way, uncertainty on model parameters naturally translates to horizon tracking. As part of the validation for the proposed approach, we verified that the estimated confidence intervals for the horizon tracking coincide with geologically complex regions, such as faults.}, keywords = {horizon picking, Imaging, machine learning, SEG, Uncertainty quantification}, doi = {10.1190/segam2020-3417560.1}, url = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/siahkoohi2020SEGuqi/siahkoohi2020SEGuqi.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/siahkoohi2020SEGuqi/siahkoohi2020SEGuqi_pres.pdf}, url2 = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/siahkoohi2020SEGuqi/siahkoohi2020SEGuqi_pres.mp4}, software = {https://github.com/slimgroup/Software.SEG2020}, author = {Ali Siahkoohi and Gabrio Rizzuti and Felix J. Herrmann} } @unpublished {siahkoohi2021dbif, title = {Deep Bayesian inference for seismic imaging with tasks}, year = {2021}, note = {Submitted to Geophysics}, month = {10}, abstract = {We propose to use techniques from Bayesian inference and deep neural networks to translate uncertainty in seismic imaging to uncertainty in tasks performed on the image, such as horizon tracking. Seismic imaging is an ill-posed inverse problem because of unavoidable bandwidth and aperture limitations, which is hampered by the presence of noise and linearization errors. Many regularization methods, such as transform-domain sparsity promotion, have been designed to deal with the adverse effects of these errors; however, these methods run the risk of biasing the solution and do not provide information on uncertainty in the image space and how this uncertainty impacts certain tasks on the image. A systematic approach is proposed to translate uncertainty due to noise in the data to confidence intervals of automatically tracked horizons in the image. The uncertainty is characterized by a convolutional neural network (CNN), and to assess these uncertainties, samples are drawn from the posterior distribution of the CNN weights, used to parameterize the image. Compared to traditional priors, it is argued in the literature that these CNNs introduce a flexible inductive bias that is a surprisingly good fit for many diverse domains in imaging.
The method of stochastic gradient Langevin dynamics is employed to sample from the posterior distribution. This method is designed to handle large-scale Bayesian inference problems with computationally expensive forward operators as in seismic imaging. Aside from offering a robust alternative to the maximum a posteriori estimate, which is prone to overfitting, access to these samples allows us to translate uncertainty in the image, due to noise in the data, to uncertainty on the tracked horizons. For instance, it admits estimates for the pointwise standard deviation on the image and for confidence intervals on its automatically tracked horizons.}, keywords = {deep priors, horizon tracking, seismic imaging, Uncertainty quantification}, url = {https://slim.gatech.edu/Publications/Public/Submitted/2021/siahkoohi2021dbif/paper.html}, author = {Ali Siahkoohi and Gabrio Rizzuti and Felix J. Herrmann} } @inproceedings{welling2011bayesian, title={Bayesian learning via stochastic gradient Langevin dynamics}, author={Welling, Max and Teh, Yee W}, booktitle={Proceedings of the 28th international conference on machine learning (ICML-11)}, pages={681--688}, year={2011}, organization={Citeseer} } @article{teletzke2010enhanced, title={Enhanced oil recovery pilot testing best practices}, author={Teletzke, Gary F and Wattenbarger, Robert C and Wilkinson, John R}, journal={SPE Reservoir Evaluation \& Engineering}, volume={13}, number={01}, pages={143--154}, year={2010}, publisher={OnePetro} } @article{karniadakis2021physics, title={Physics-informed machine learning}, author={Karniadakis, George Em and Kevrekidis, Ioannis G and Lu, Lu and Perdikaris, Paris and Wang, Sifan and Yang, Liu}, journal={Nature Reviews Physics}, volume={3}, number={6}, pages={422--440}, year={2021}, publisher={Nature Publishing Group} } @article{lu2019deeponet, title={Deeponet: Learning nonlinear operators for identifying differential equations based on the universal approximation theorem of operators}, author={Lu, Lu and Jin, Pengzhan and Karniadakis, George Em}, journal={arXiv preprint arXiv:1910.03193}, year={2019} } @article{raissi2019physics, title={Physics-informed neural networks: A deep learning framework for solving forward and inverse problems involving nonlinear partial differential equations}, author={Raissi, Maziar and Perdikaris, Paris and Karniadakis, George E}, journal={Journal of Computational Physics}, volume={378}, pages={686--707}, year={2019}, publisher={Elsevier} } @article{kochkov2021machine, title={Machine learning--accelerated computational fluid dynamics}, author={Kochkov, Dmitrii and Smith, Jamie A and Alieva, Ayya and Wang, Qing and Brenner, Michael P and Hoyer, Stephan}, journal={Proceedings of the National Academy of Sciences}, volume={118}, number={21}, year={2021}, publisher={National Acad Sciences} } @article{adler2018learned, title={Learned primal-dual reconstruction}, author={Adler, Jonas and {\"O}ktem, Ozan}, journal={IEEE Transactions on Medical Imaging}, volume={37}, number={6}, pages={1322--1332}, year={2018}, publisher={IEEE} } @article{kovachki2021universal, title={On universal approximation and error bounds for Fourier Neural Operators}, author={Kovachki, Nikola and Lanthaler, Samuel and Mishra, Siddhartha}, journal={Journal of Machine Learning Research}, volume={22}, pages={Art--No}, year={2021}, publisher={JMLR Press} } @article{pathak2022fourcastnet, title={FourCastNet: A Global Data-driven High-resolution Weather Model using Adaptive Fourier Neural Operators}, author={Pathak, Jaideep and
Subramanian, Shashank and Harrington, Peter and Raja, Sanjeev and Chattopadhyay, Ashesh and Mardani, Morteza and Kurth, Thorsten and Hall, David and Li, Zongyi and Azizzadenesheli, Kamyar and others}, journal={arXiv preprint arXiv:2202.11214}, year={2022} } @article{guibas2021adaptive, title={Adaptive Fourier Neural Operators: Efficient Token Mixers for Transformers}, author={Guibas, John and Mardani, Morteza and Li, Zongyi and Tao, Andrew and Anandkumar, Anima and Catanzaro, Bryan}, journal={arXiv preprint arXiv:2111.13587}, year={2021} } @article{guan2021fourier, title={Fourier Neural Operator Networks: A Fast and General Solver for the Photoacoustic Wave Equation}, author={Guan, Steven and Hsu, Ko-Tsung and Chitnis, Parag V}, journal={arXiv preprint arXiv:2108.09374}, year={2021} } @article{zhang2022fourier, title={Fourier Neural Operator for Solving Subsurface Oil/Water Two-Phase Flow Partial Differential Equation}, author={Zhang, Kai and Zuo, Yuande and Zhao, Hanjun and Ma, Xiaopeng and Gu, Jianwei and Wang, Jian and Yang, Yongfei and Yao, Chuanjin and Yao, Jun}, journal={SPE Journal}, pages={1--15}, year={2022} } @article{innes2019differentiable, title={A differentiable programming system to bridge machine learning and scientific computing}, author={Innes, Mike and Edelman, Alan and Fischer, Keno and Rackauckas, Chris and Saba, Elliot and Shah, Viral B and Tebbutt, Will}, journal={arXiv preprint arXiv:1907.07587}, year={2019} } @inproceedings{lempitsky2018deep, title={Deep image prior}, author={Lempitsky, Victor and Vedaldi, Andrea and Ulyanov, Dmitry}, booktitle={2018 IEEE/CVF Conference on Computer Vision and Pattern Recognition}, pages={9446--9454}, year={2018}, organization={IEEE} } @InProceedings{Cheng_2019_CVPR, author = {Cheng, Zezhou and Gadelha, Matheus and Maji, Subhransu and Sheldon, Daniel}, title = {{A Bayesian Perspective on the Deep Image Prior}}, booktitle = {{The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}}, month = {June}, year = {2019}, pages = {5443--5451} } @article{wu2019parametric, title={Parametric convolutional neural network-domain full-waveform inversion}, author={Wu, Yulang and McMechan, George A}, journal={{GEOPHYSICS}}, volume={84}, number={6}, pages={R881--R896}, year={2019}, publisher={{Society of Exploration Geophysicists}}, doi={10.1190/geo2018-0224.1} } @article{liu2019deep, title={Deep-seismic-prior-based reconstruction of seismic data using convolutional neural networks}, author={Liu, Qun and Fu, Lihua and Zhang, Meng}, journal={arXiv preprint arXiv:1911.08784}, year={2019} } @conference{siahkoohi2020EAGEdlb, title = {A deep-learning based Bayesian approach to seismic imaging and uncertainty quantification}, booktitle = {EAGE Annual Conference Proceedings}, year = {2020}, note = {Accepted in EAGE}, month = {1}, abstract = {Uncertainty quantification is essential when dealing with ill-conditioned inverse problems due to the inherent nonuniqueness of the solution. Bayesian approaches allow us to determine how likely an estimation of the unknown parameters is via formulating the posterior distribution. Unfortunately, it is often not possible to formulate a prior distribution that precisely encodes our prior knowledge about the unknown. Furthermore, adherence to handcrafted priors may greatly bias the outcome of the Bayesian analysis. 
To address this issue, we propose to use the functional form of a randomly initialized convolutional neural network as an implicit structured prior, which is shown to promote natural images and excludes images with unnatural noise. In order to incorporate the model uncertainty into the final estimate, we sample the posterior distribution using stochastic gradient Langevin dynamics and perform Bayesian model averaging on the obtained samples. Our synthetic numerical experiment verifies that deep priors combined with Bayesian model averaging are able to partially circumvent imaging artifacts and reduce the risk of overfitting in the presence of extreme noise. Finally, we present pointwise variance of the estimates as a measure of uncertainty, which coincides with regions that are difficult to image.}, keywords = {deep learning, EAGE, seismic imaging, stochastic gradient Langevin dynamics, Uncertainty quantification}, url = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2020/siahkoohi2020EAGEdlb/siahkoohi2020EAGEdlb.html}, author = {Ali Siahkoohi and Gabrio Rizzuti and Felix J. Herrmann} } @conference{siahkoohi2020SEGwdp, title = {Weak deep priors for seismic imaging}, booktitle = {SEG Technical Program Expanded Abstracts}, year = {2020}, note = {(SEG, virtual)}, month = {09}, pages = {2998-3002}, abstract = {Incorporating prior knowledge on model unknowns of interest is essential when dealing with ill-posed inverse problems due to the nonuniqueness of the solution and data noise. Unfortunately, it is not trivial to fully describe our priors in a convenient and analytical way. Parameterizing the unknowns with a convolutional neural network (CNN), and assuming an uninformative Gaussian prior on its weights, leads to a variational prior on the output space that favors "natural" images and excludes noisy artifacts, as long as overfitting is prevented. This is the so-called deep-prior approach. In seismic imaging, however, evaluating the forward operator is computationally expensive, and training a randomly initialized CNN becomes infeasible. We propose, instead, a weak version of deep priors, which consists of relaxing the requirement that reflectivity models must lie in the network range, and letting the unknowns deviate from the network output according to a Gaussian distribution. Finally, we jointly solve for the reflectivity model and CNN weights. The chief advantage of this approach is that the updates for the CNN weights do not involve the modeling operator, and become relatively cheap. Our synthetic numerical experiments demonstrate that the weak deep prior is more robust with respect to noise than conventional least-squares imaging approaches, with roughly twice the computational cost of reverse-time migration, which is the affordable computational budget in large-scale imaging problems.}, keywords = {deep prior, machine learning, SEG, seismic imaging}, doi = {10.1190/segam2020-3417568.1}, url = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/siahkoohi2020SEGwdp/siahkoohi2020SEGwdp.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/siahkoohi2020SEGwdp/siahkoohi2020SEGwdp_pres.pdf}, url2 = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/siahkoohi2020SEGwdp/siahkoohi2020SEGwdp_pres.mp4}, software = {https://github.com/slimgroup/Software.SEG2020}, author = {Ali Siahkoohi and Gabrio Rizzuti and Felix J. 
Herrmann} } @article{teh2016consistency, title={Consistency and fluctuations for stochastic gradient Langevin dynamics}, author={Teh, Yee Whye and Thiery, Alexandre H and Vollmer, Sebastian J}, journal={The Journal of Machine Learning Research}, volume={17}, number={1}, pages={193--225}, year={2016}, publisher={JMLR. org} } @inproceedings{li2016preconditioned, title={Preconditioned stochastic gradient Langevin dynamics for deep neural networks}, author={Li, Chunyuan and Chen, Changyou and Carlson, David and Carin, Lawrence}, booktitle={Thirtieth AAAI Conference on Artificial Intelligence}, year={2016} } @inproceedings{hatab2021assessment, title={Assessment of data error for 4D quantitative interpretation}, author={Hatab, Mohamed and MacBeth, Colin}, booktitle={First International Meeting for Applied Geoscience \& Energy}, pages={3439--3443}, year={2021}, organization={Society of Exploration Geophysicists} } @inproceedings{hatab2021assessing, title={Assessing Data Error for 4D Seismic History Matching: Uncertainties from Processing Workflow}, author={Hatab, M and MacBeth, C}, booktitle={EAGE Annual Conference \& Exhibition}, volume={2021}, number={1}, pages={1--5}, year={2021}, organization={European Association of Geoscientists \& Engineers} } @article{hassanzadeh2009accelerating, title={Accelerating CO2 dissolution in saline aquifers for geological storage - Mechanistic and sensitivity studies}, author={Hassanzadeh, Hassan and Pooladi-Darvish, Mehran and Keith, David W}, journal={Energy \& Fuels}, volume={23}, number={6}, pages={3328--3336}, year={2009}, publisher={ACS Publications} } @article{wu2020potential, title={The potential of coupled carbon storage and geothermal extraction in a CO 2-enhanced geothermal system: a review}, author={Wu, Yu and Li, Pan}, journal={Geothermal Energy}, volume={8}, number={1}, pages={1--28}, year={2020}, publisher={SpringerOpen} } @article{depaolo2013geochemistry, title={Geochemistry of geologic carbon sequestration: an overview}, author={DePaolo, Donald J and Cole, David R}, journal={Reviews in Mineralogy and Geochemistry}, volume={77}, number={1}, pages={1--14}, year={2013}, publisher={Mineralogical Society of America} } @article{turkel1993review, title={Review of preconditioning methods for fluid dynamics}, author={Turkel, Eli}, journal={Applied Numerical Mathematics}, volume={12}, number={1-3}, pages={257--284}, year={1993}, publisher={Elsevier} } @article{turkel1999preconditioning, title={Preconditioning techniques in computational fluid dynamics}, author={Turkel, Eli}, journal={Annual Review of Fluid Mechanics}, volume={31}, number={1}, pages={385--416}, year={1999}, publisher={Annual Reviews 4139 El Camino Way, PO Box 10139, Palo Alto, CA 94303-0139, USA} } @article{ayeni2010target, title={Target-oriented joint least-squares migration/inversion of time-lapse seismic data sets}, author={Ayeni, Gboyega and Biondi, Biondo}, journal={Geophysics}, volume={75}, number={3}, pages={R61--R73}, year={2010}, publisher={Society of Exploration Geophysicists} } @phdthesis{kotsi2020time, title={Time-lapse seismic imaging and uncertainty quantification}, author={Kotsi, Maria}, year={2020}, school={Memorial University of Newfoundland} } @article{dupuy2017quantitative, title={Quantitative seismic characterization of CO2 at the Sleipner storage site, North Sea}, author={Dupuy, Bastien and Romdhane, Anouar and Eliasson, Peder and Querendez, Etor and Yan, Hong and Torres, Ver{\'o}nica A and Ghaderi, Amir}, journal={Interpretation}, volume={5}, number={4}, 
pages={SS23--SS42}, year={2017}, publisher={Society of Exploration Geophysicists and American Association of Petroleum~…} }
@article{eiken2011lessons, title={Lessons learned from 14 years of CCS operations: Sleipner, In Salah and Sn{\o}hvit}, author={Eiken, Ola and Ringrose, Philip and Hermanrud, Christian and Nazarian, Bamshad and Torp, Tore A and H{\o}ier, Lars}, journal={Energy Procedia}, volume={4}, pages={5541--5548}, year={2011}, publisher={Elsevier} }
@article{wu2021significance, title={Significance of fault seal in assessing CO2 storage capacity and containment risks--an example from the Horda Platform, northern North Sea}, author={Wu, Long and Thorsen, Rune and Ottesen, Signe and Meneguolo, Renata and Hartvedt, Kristin and Ringrose, Philip and Nazarian, Bamshad}, journal={Petroleum Geoscience}, volume={27}, number={3}, pages={petgeo2020--102}, year={2021}, publisher={European Association of Geoscientists \& Engineers} }
@book{ringrose2020store, title={How to store {CO2} underground: Insights from early-mover {CCS} Projects}, author={Ringrose, Philip}, year={2020}, publisher={Springer} }
@article{oliver20214d, title={4D seismic history matching}, author={Oliver, Dean S and Fossum, Kristian and Bhakta, Tuhin and Sand{\o}, Ivar and N{\ae}vdal, Geir and Lorentzen, Rolf Johan}, journal={Journal of Petroleum Science and Engineering}, volume={207}, pages={109119}, year={2021}, publisher={Elsevier} }
@article{abidoye2015geological, title={Geological carbon sequestration in the context of two-phase flow in porous media: a review}, author={Abidoye, Luqman K and Khudaida, Kamal J and Das, Diganta B}, journal={Critical Reviews in Environmental Science and Technology}, volume={45}, number={11}, pages={1105--1147}, year={2015}, publisher={Taylor \& Francis} }
@incollection{nordbotten2011geological, title={Geological storage of {CO2}: modeling approaches for large-scale simulation}, author={Nordbotten, Jan M and Celia, Michael Anthony}, booktitle={Geological Storage of {CO2}: Modeling Approaches for Large-Scale Simulation}, year={2011}, publisher={John Wiley and Sons} }
@article{sengupta2003impact, title={Impact of flow-simulation parameters on saturation scales and seismic velocity}, author={Sengupta, Madhumita and Mavko, Gary}, journal={Geophysics}, volume={68}, number={4}, pages={1267--1280}, year={2003}, publisher={Society of Exploration Geophysicists} }
@article{kotsi2020uncertainty, title={Uncertainty quantification in time-lapse seismic imaging: a full-waveform approach}, author={Kotsi, M and Malcolm, A and Ely, G}, journal={Geophysical Journal International}, volume={222}, number={2}, pages={1245--1263}, year={2020}, publisher={Oxford University Press} }
@inproceedings{rezende2015variational, title={Variational inference with normalizing flows}, author={Rezende, Danilo and Mohamed, Shakir}, booktitle={International conference on machine learning}, pages={1530--1538}, year={2015}, organization={PMLR} }
@article{dinh2016density, title={Density estimation using {Real NVP}}, author={Dinh, Laurent and Sohl-Dickstein, Jascha and Bengio, Samy}, journal={arXiv preprint arXiv:1605.08803}, year={2016} }
@article{lensink2019fully, title={Fully hyperbolic convolutional neural networks}, author={Lensink, Keegan and Peters, Bas and Haber, Eldad}, journal={arXiv preprint arXiv:1905.10484}, year={2019} }
@article{kruse2019hint, title={HINT: Hierarchical invertible neural transport for density estimation and Bayesian inference}, author={Kruse, Jakob and Detommaso, Gianluca and Scheichl, Robert and K{\"o}the, Ullrich}, journal={arXiv preprint arXiv:1905.10687}, year={2019} }
@conference {yin2020SEGesi, title = {Extended source imaging {\textendash}- a unifying framework for seismic and medical imaging}, booktitle = {SEG Technical Program Expanded Abstracts}, year = {2020}, note = {(SEG, virtual)}, month = {09}, pages = {3502-3506}, abstract = {We present three imaging modalities that live on the crossroads of seismic and medical imaging. Through the lens of extended source imaging, we can draw deep connections among the fields of wave-equation based seismic and medical imaging, despite first appearances. From the seismic perspective, we underline the importance to work with the correct physics and spatially varying velocity fields. Medical imaging, on the other hand, opens the possibility for new imaging modalities where outside stimuli, such as laser or radar pulses, can not only be used to identify endogenous optical or thermal contrasts but that these sources can also be used to insonify the medium so that images of the whole specimen can in principle be created.}, keywords = {medical imaging, SEG, seismic imaging, variable projection}, doi = {10.1190/segam2020-3426999.1}, url = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/yin2020SEGesi/yin2020SEGesi.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/yin2020SEGesi/yin2020SEGesi_pres.pdf}, url2 = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2020/yin2020SEGesi/yin2020SEGesi_pres.mp4}, software = {https://github.com/slimgroup/Software.SEG2020}, author = {Ziyi Yin and Rafael Orozco and Philipp A. Witte and Mathias Louboutin and Gabrio Rizzuti and Felix J. Herrmann} }
@article{lu2015l1, title={L1 norm constrained migration of blended data with the FISTA algorithm}, author={Lu, Xinting and Han, Liguo and Yu, Jianglong and Chen, Xue}, journal={Journal of Geophysics and Engineering}, volume={12}, number={4}, pages={620--628}, year={2015}, publisher={Oxford University Press} }
@book{bishop2006pattern, title={Pattern recognition and machine learning}, author={Bishop, Christopher M and Nasrabadi, Nasser M}, year={2006}, publisher={Springer} }
@article{guitton2003robust, title={Robust inversion of seismic data using the Huber norm}, author={Guitton, Antoine and Symes, William W}, journal={Geophysics}, volume={68}, number={4}, pages={1310--1319}, year={2003}, publisher={Society of Exploration Geophysicists} }
@article{virieux2009overview, title={An overview of full-waveform inversion in exploration geophysics}, author={Virieux, Jean and Operto, St{\'e}phane}, journal={Geophysics}, volume={74}, number={6}, pages={WCC1--WCC26}, year={2009}, publisher={Society of Exploration Geophysicists} }
@conference {siahkoohi2021SEGlbe, title = {Learning by example: fast reliability-aware seismic imaging with normalizing flows}, booktitle = {SEG Technical Program Expanded Abstracts}, year = {2021}, note = {(IMAGE, Denver)}, month = {09}, pages = {1580-1585}, abstract = {Uncertainty quantification provides quantitative measures on the reliability of candidate solutions of ill-posed inverse problems. Due to their sequential nature, Monte Carlo sampling methods require large numbers of sampling steps for accurate Bayesian inference and are often computationally infeasible for large-scale inverse problems, such as seismic imaging. Our main contribution is a data-driven variational inference approach where we train a normalizing flow (NF), a type of invertible neural net, capable of cheaply sampling the posterior distribution given previously unseen seismic data from neighboring surveys. To arrive at this result, we train the NF on pairs of low- and high-fidelity migrated images. In our numerical example, we obtain high-fidelity images from the Parihaka dataset and low-fidelity images are derived from these images through the process of demigration, followed by adding noise and migration. During inference, given shot records from a new neighboring seismic survey, we first compute the reverse-time migration image. Next, by feeding this low-fidelity migrated image to the NF we gain access to samples from the posterior distribution virtually for free. We use these samples to compute a high-fidelity image including a first assessment of the image{\textquoteright}s reliability. To our knowledge, this is the first attempt to train a conditional network on what we know from neighboring images to improve the current and assess its reliability.}, keywords = {deep learning, Normalizing flows, SEG, seismic imaging, Variational Inference}, doi = {10.1190/segam2021-3581836.1}, url = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2021/siahkoohi2021SEGlbe/abstract.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2021/siahkoohi2021SEGlbe/Tue-09-15-siahkoohi.pdf}, url2 = {https://slim.gatech.edu/Publications/Public/Conferences/SEG/2021/siahkoohi2021SEGlbe/Tue-09-15-siahkoohi.mp4}, software = {https://github.com/slimgroup/Software.SEG2021}, author = {Ali Siahkoohi and Felix J. Herrmann} }
@conference {orozco2021NIPSpicp, title = {Photoacoustic Imaging with Conditional Priors from Normalizing Flows}, booktitle = {Neural Information Processing Systems (NeurIPS)}, year = {2021}, note = {(NIPS, virtual)}, month = {12}, abstract = {Photoacoustic imaging is a biomedical imaging technique based on the photoacoustic effect. It leverages the interplay between optics and acoustics as a mean to circumvent the limitations of imaging modalities relying on single-type physics. Light beams generated by a pulsed laser can penetrate biological tissues by several centimeters, and are absorbed based on oxygen saturation or hemoglobin concentration. While optical absorption is in principle an ideal parameter for medical imaging (e.g., with respect to the detection of cancerous tissue), strong scattering imposes important limitations in its imaging resolution. Ultrasonics, on the other hand, can theoretically provide resolution of medical diagnostic value, but produce images of mechanical properties whose contrasts are not sensitive. In photoacoustics, optical and acoustic effects are combined to gain the best of both worlds. Under conditions of thermal and stress confinement, thermal energy can efficiently build up in biological tissues, which in turn undergo thermal expansion and effectively act as a spatially distributed acoustic source. In photoacoustic imaging, the actual object of interest is the induced source, as it is directly related to optical absorption and can be recovered with a relatively higher resolution than pure optical imaging, based on the acquired ultrasonic data.}, keywords = {conditional prior, deep image, MAP, NIPS, normalizing flow, Photoacoustic, Variational Inference}, url = {https://slim.gatech.edu/Publications/Public/Conferences/NIPS/2021/orozco2021NIPSpicp/deep_inverse_2021.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/NIPS/2021/orozco2021NIPSpicp/posterneurips2021_orozco.pdf}, url2 = {https://openreview.net/forum?id=woi1OTvROO1}, software = {https://github.com/slimgroup/InvertibleNetworks.jl}, author = {Rafael Orozco and Ali Siahkoohi and Gabrio Rizzuti and Tristan van Leeuwen and Felix J. Herrmann} }
@misc{ali_siahkoohi_2021_4610094, author = {Ali Siahkoohi}, title = {{slimgroup/FastApproximateInference.jl: Paper version}}, month = mar, year = 2021, publisher = {Zenodo}, version = {v0.1.0}, doi = {10.5281/zenodo.4610094}, url = {https://doi.org/10.5281/zenodo.4610094} }
@article{tarantola1984inversion, title={Inversion of seismic reflection data in the acoustic approximation}, author={Tarantola, Albert}, journal={Geophysics}, volume={49}, number={8}, pages={1259--1266}, year={1984}, publisher={Society of Exploration Geophysicists} }
@article{pruess2011numerical, title={Numerical simulation studies of the long-term evolution of a {CO2} plume in a saline aquifer with a sloping caprock}, author={Pruess, Karsten and Nordbotten, Jan}, journal={Transport in porous media}, volume={90}, number={1}, pages={135--151}, year={2011}, publisher={Springer} }
@article{wen2021towards, title={Towards a predictor for {CO2} plume migration using deep neural networks}, author={Wen, Gege and Tang, Meng and Benson, Sally M}, journal={International Journal of Greenhouse Gas Control}, volume={105}, pages={103223}, year={2021}, publisher={Elsevier} }
@article{van2013fast, title={Fast waveform inversion without source-encoding}, author={van Leeuwen, Tristan and Herrmann, Felix J}, journal={Geophysical Prospecting}, volume={61}, pages={10--19}, year={2013}, publisher={Blackwell Publishing Ltd Oxford, UK} }
@article{dosovitskiy2020image, title={An image is worth 16x16 words: Transformers for image recognition at scale}, author={Dosovitskiy, Alexey and Beyer, Lucas and Kolesnikov, Alexander and Weissenborn, Dirk and Zhai, Xiaohua and Unterthiner, Thomas and Dehghani, Mostafa and Minderer, Matthias and Heigold, Georg and Gelly, Sylvain and others}, journal={arXiv preprint arXiv:2010.11929}, year={2020} }
@article{vaswani2017attention, title={Attention is all you need}, author={Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N and Kaiser, {\L}ukasz and Polosukhin, Illia}, journal={Advances in neural information processing systems}, volume={30}, year={2017} }
@article{chadwick2014co2, title={CO2 storage monitoring: leakage detection and measurement in subsurface volumes from 3D seismic data at Sleipner}, author={Chadwick, R Andrew and Marchant, Benjamin P and Williams, Gareth A}, journal={Energy Procedia}, volume={63}, pages={4224--4239}, year={2014}, publisher={Elsevier} }
@incollection{newell2019overview, title={Overview of geological carbon storage (GCS)}, author={Newell, Pania and Ilgen, Anastasia G}, booktitle={Science of Carbon Storage in Deep Saline Formations}, pages={1--13}, year={2019}, publisher={Elsevier} }
@techreport{pruess2006co2, title={On CO2 Behavior in the Subsurface, Following Leakage from a Geologic Storage Reservoir}, author={Pruess, Karsten}, year={2006}, institution={Lawrence Berkeley National Lab. (LBNL), Berkeley, CA (United States)} }
@article{yosinski2014transferable, title={How transferable are features in deep neural networks?}, author={Yosinski, Jason and Clune, Jeff and Bengio, Yoshua and Lipson, Hod}, journal={Advances in neural information processing systems}, volume={27}, year={2014} }
@inproceedings{zhou2016learning, title={Learning deep features for discriminative localization}, author={Zhou, Bolei and Khosla, Aditya and Lapedriza, Agata and Oliva, Aude and Torralba, Antonio}, booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition}, pages={2921--2929}, year={2016} }
@article{siahkoohi2019importance, title={The importance of transfer learning in seismic modeling and imaging}, author={Siahkoohi, Ali and Louboutin, Mathias and Herrmann, Felix J}, journal={Geophysics}, volume={84}, number={6}, pages={A47--A52}, year={2019}, publisher={Society of Exploration Geophysicists} }
@inproceedings{mackowiak2021generative, title={Generative classifiers as a basis for trustworthy image classification}, author={Mackowiak, Radek and Ardizzone, Lynton and K{\"o}the, Ullrich and Rother, Carsten}, booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition}, pages={2971--2981}, year={2021} }
@article{zhang2021survey, title={A survey on neural network interpretability}, author={Zhang, Yu and Ti{\v{n}}o, Peter and Leonardis, Ale{\v{s}} and Tang, Ke}, journal={IEEE Transactions on Emerging Topics in Computational Intelligence}, year={2021}, publisher={IEEE} }
@article{hooker2019benchmark, title={A benchmark for interpretability methods in deep neural networks}, author={Hooker, Sara and Erhan, Dumitru and Kindermans, Pieter-Jan and Kim, Been}, journal={Advances in neural information processing systems}, volume={32}, year={2019} }
@inproceedings{wang2020score, title={Score-CAM: Score-weighted visual explanations for convolutional neural networks}, author={Wang, Haofan and Wang, Zifan and Du, Mengnan and Yang, Fan and Zhang, Zijian and Ding, Sirui and Mardziel, Piotr and Hu, Xia}, booktitle={Proceedings of the IEEE/CVF conference on computer vision and pattern recognition workshops}, pages={24--25}, year={2020} }
@article{klimentos1991effects, title={The effects of porosity-permeability-clay content on the velocity of compressional waves}, author={Klimentos, Theodoros}, journal={Geophysics}, volume={56}, number={12}, pages={1930--1939}, year={1991}, publisher={Society of Exploration Geophysicists} }
@conference {witte2017EAGEspl, title = {Sparsity-promoting least-squares migration with the linearized inverse scattering imaging condition}, booktitle = {EAGE Annual Conference Proceedings}, year = {2017}, note = {(EAGE, Paris)}, month = {06}, abstract = {Reverse-time migration (RTM) with the conventional cross-correlation imaging condition suffers from low-frequency artifacts that result from backscattered energy in the background velocity models. This problem translates to least-squares reverse-time migration (LS-RTM), where these artifacts slow down the convergence, as many of the initial iterations are spent on removing them. In RTM, this problem has been successfully addressed by the introduction of the so-called inverse scattering imaging condition, which naturally removes these artifacts. In this work, we derive the corresponding linearized forward operator of the inverse scattering imaging operator and incorporate this forward/adjoint operator pair into a sparsity-promoting (SPLS-RTM) workflow. We demonstrate on a challenging salt model, that LS-RTM with the inverse scattering imaging condition is far less prone to low-frequency artifacts than the conventional cross-correlation imaging condition, improves the convergence and does not require any type of additional image filters within the inversion. Through source subsampling and sparsity promotion, we reduce the computational cost in terms of PDE solves to a number comparable to conventional RTM, making our workflow applicable to large-scale problems.}, keywords = {EAGE, imaging condition, least-squares migration, linearized Bregman}, doi = {10.3997/2214-4609.201701125}, url = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2017/witte2017EAGEspl/witte2017EAGEspl.html}, presentation = {https://slim.gatech.edu/Publications/Public/Conferences/EAGE/2017/witte2017EAGEspl/witte2017EAGEspl_poster.pdf}, author = {Philipp A. Witte and Mengmeng Yang and Felix J. Herrmann} }
@article{t2012linearized, title={Linearized inverse scattering based on seismic reverse time migration}, author={Op 't Root, Tim J P M and Stolk, Christiaan C and de Hoop, Maarten V}, journal={Journal de math{\'e}matiques pures et appliqu{\'e}es}, volume={98}, number={2}, pages={211--238}, year={2012}, publisher={Elsevier} }
@article{kolster2018impact, title={The impact of time-varying CO2 injection rate on large scale storage in the UK Bunter Sandstone}, author={Kolster, Clea and Agada, Simeon and Mac Dowell, Niall and Krevor, Samuel}, journal={International Journal of Greenhouse Gas Control}, volume={68}, pages={77--85}, year={2018}, publisher={Elsevier} }
@misc{jacobgilpytorchcam, title={PyTorch library for CAM methods}, author={Jacob Gildenblat and contributors}, year={2021}, publisher={GitHub}, howpublished={\url{https://github.com/jacobgil/pytorch-grad-cam}} }