@article{asch_brady_gallardo_hood_chu_farazmand_2022,
  title = {Model-assisted deep learning of rare extreme events from partial observations},
  volume = {32},
  ISSN = {1089-7682},
  url = {https://doi.org/10.1063/5.0077646},
  DOI = {10.1063/5.0077646},
  abstractNote = {To predict rare extreme events using deep neural networks, one encounters the so-called small data problem because even long-term observations often contain few extreme events. Here, we investigate a model-assisted framework where the training data are obtained from numerical simulations, as opposed to observations, with adequate samples from extreme events. However, to ensure the trained networks are applicable in practice, the training is not performed on the full simulation data; instead, we only use a small subset of observable quantities, which can be measured in practice. We investigate the feasibility of this model-assisted framework on three different dynamical systems (Rössler attractor, FitzHugh-Nagumo model, and a turbulent fluid flow) and three different deep neural network architectures (feedforward, long short-term memory, and reservoir computing). In each case, we study the prediction accuracy, robustness to noise, reproducibility under repeated training, and sensitivity to the type of input data. In particular, we find long short-term memory networks to be most robust to noise and to yield relatively accurate predictions, while requiring minimal fine-tuning of the hyperparameters.},
  number = {4},
  journal = {CHAOS},
  publisher = {AIP Publishing},
  author = {Asch, Anna and Brady, Ethan J. and Gallardo, Hugo and Hood, John and Chu, Bryan and Farazmand, Mohammad},
  year = {2022},
  month = {Apr}
}

@article{chu_farazmand_2021,
  title = {Data-driven prediction of multistable systems from sparse measurements},
  volume = {31},
  ISSN = {1089-7682},
  url = {https://doi.org/10.1063/5.0046203},
  DOI = {10.1063/5.0046203},
  abstractNote = {We develop a data-driven method, based on semi-supervised classification, to predict the asymptotic state of multistable systems when only sparse spatial measurements of the system are feasible. Our method predicts the asymptotic behavior of an observed state by quantifying its proximity to the states in a precomputed library of data. To quantify this proximity, we introduce a sparsity-promoting metric-learning (SPML) optimization, which learns a metric directly from the precomputed data. The optimization problem is designed so that the resulting optimal metric satisfies two important properties: (i) it is compatible with the precomputed library and (ii) it is computable from sparse measurements. We prove that the proposed SPML optimization is convex, its minimizer is non-degenerate, and it is equivariant with respect to the scaling of the constraints. We demonstrate the application of this method on two multistable systems: a reaction-diffusion equation, arising in pattern formation, which has four asymptotically stable steady states, and a FitzHugh-Nagumo model with two asymptotically stable steady states. Classifications of the multistable reaction-diffusion equation based on SPML predict the asymptotic behavior of initial conditions based on two-point measurements with 95% accuracy when a moderate number of labeled data are used. For the FitzHugh-Nagumo, SPML predicts the asymptotic behavior of initial conditions from one-point measurements with 90% accuracy. The learned optimal metric also determines where the measurements need to be made to ensure accurate predictions.},
  number = {6},
  journal = {CHAOS},
  publisher = {AIP Publishing},
  author = {Chu, Bryan and Farazmand, Mohammad},
  year = {2021},
  month = {Jun}
}