@article{cohen-cobos_sanders_degroot_guarnera_leary_lindner_manz_2024,
  title={Chemistry does general relativity: reaction-diffusion waves can model gravitational lensing},
  volume={11},
  ISSN={2296-424X},
  DOI={10.3389/fphy.2023.1315966},
  abstractNote={Gravitational lensing is a general relativistic (GR) phenomenon where a massive object redirects light, deflecting, magnifying, and sometimes multiplying its source. We use reaction-diffusion (RD) Belousov-Zhabotinsky (BZ) chemistry to study this astronomical effect in a table-top experiment. We experimentally observe BZ waves passing through non-planar, quasi-two-dimensional molds and reproduce the waveforms in computer simulations using planar RD waves propagating with variable diffusion. We tune the variable diffusion to match the Schwarzschild-coordinate light speed near a spherical mass so the RD propagation approximates Einstein’s famous light deflection relation. We discuss varying the diffusion or reaction rates with a gel matrix or with illumination, electric field, or temperature gradients.},
  journal={Frontiers in Physics},
  author={Cohen-Cobos, Daniel and Sanders, Kiyomi and DeGroot, Laura and Guarnera, Heather and Leary, Cody and Lindner, John F. and Manz, Niklas},
  year={2024},
  month={Jan}
}

@article{fuller_cohen-cobos_lindner_manz_2024,
  title={Light-Sensitive Diffusion Diodes for Reaction-Diffusion Waves},
  volume={19},
  ISSN={1548-7202},
  DOI={10.32908/ijuc.v19.200823},
  number={1},
  journal={International Journal of Unconventional Computing},
  author={Fuller, Chase A. and Cohen-Cobos, Daniel and Lindner, John F. and Manz, Niklas},
  year={2024},
  pages={1–15}
}

@article{choudhary_radhakrishnan_lindner_sinha_ditto_2023,
  title={Neuronal diversity can improve machine learning for physics and beyond},
  volume={13},
  ISSN={2045-2322},
  DOI={10.1038/s41598-023-40766-6},
  number={1},
  journal={Scientific Reports},
  author={Choudhary, Anshul and Radhakrishnan, Anil and Lindner, John F. and Sinha, Sudeshna and Ditto, William L.},
  year={2023},
  month={Nov}
}

@article{holliday_lindner_ditto_2023,
  title={Solving quantum billiard eigenvalue problems with physics-informed machine learning},
  volume={13},
  ISSN={2158-3226},
  url={https://doi.org/10.1063/5.0161067},
  DOI={10.1063/5.0161067},
  abstractNote={A particle confined to an impassable box is a paradigmatic and exactly solvable one-dimensional quantum system modeled by an infinite square well potential. Here, we explore some of its infinitely many generalizations to two dimensions, including particles confined to rectangle-, ellipse-, triangle-, and cardioid-shaped boxes using physics-informed neural networks. In particular, we generalize an unsupervised learning algorithm to find the particles’ eigenvalues and eigenfunctions, even in cases where the eigenvalues are degenerate. During training, the neural network adjusts its weights and biases, one of which is the energy eigenvalue, so that its output approximately solves the stationary Schrödinger equation with normalized and mutually orthogonal eigenfunctions. The same procedure solves the Helmholtz equation for the harmonics and vibration modes of waves on drumheads or transverse magnetic modes of electromagnetic cavities. Related applications include quantum billiards, quantum chaos, and Laplacian spectra.},
  number={8},
  journal={AIP Advances},
  author={Holliday, Elliott G. and Lindner, John F. and Ditto, William L.},
  year={2023},
  month={Aug}
}

@article{xie_bae_lindner_2022,
  title={Alien suns reversing in exoplanet skies},
  volume={12},
  ISSN={2045-2322},
  DOI={10.1038/s41598-022-11527-8},
  number={1},
  journal={Scientific Reports},
  author={Xie, Xinchen and Bae, Hwan and Lindner, John F.},
  year={2022},
  month={May}
}

@article{choudhary_lindner_holliday_miller_sinha_ditto_2021,
  title={Forecasting Hamiltonian dynamics without canonical coordinates},
  volume={103},
  ISSN={1573-269X},
  DOI={10.1007/s11071-020-06185-2},
  abstractNote={Conventional neural networks are universal function approximators, but they may need impractically many training data to approximate nonlinear dynamics. Recently introduced Hamiltonian neural networks can efficiently learn and forecast dynamical systems that conserve energy, but they require special inputs called canonical coordinates, which may be hard to infer from data. Here, we prepend a conventional neural network to a Hamiltonian neural network and show that the combination accurately forecasts Hamiltonian dynamics from generalised noncanonical coordinates. Examples include a predator–prey competition model where the canonical coordinates are nonlinear functions of the predator and prey populations, an elastic pendulum characterised by nontrivial coupling of radial and angular motion, a double pendulum each of whose canonical momenta are intricate nonlinear combinations of angular positions and velocities, and real-world video of a compound pendulum clock.},
  number={2},
  journal={Nonlinear Dynamics},
  author={Choudhary, Anshul and Lindner, John F. and Holliday, Elliott G. and Miller, Scott T. and Sinha, Sudeshna and Ditto, William L.},
  year={2021},
  month={Jan},
  pages={1553–1562}
}

@article{miller_lindner_choudhary_sinha_ditto_2021,
  title={Negotiating the separatrix with machine learning},
  volume={12},
  ISSN={2185-4106},
  url={https://doi.org/10.1587/nolta.12.134},
  DOI={10.1587/nolta.12.134},
  abstractNote={Physics-informed machine learning has recently been shown to efficiently learn complex trajectories of nonlinear dynamical systems, even when order and chaos coexist. However, care must be taken when one or more variables are unbounded, such as in rotations. Here we use the framework of Hamiltonian Neural Networks (HNN) to learn the complex dynamics of nonlinear single and double pendulums, which can both librate and rotate, by mapping the unbounded phase space onto a compact cylinder. We clearly demonstrate that our approach can successfully forecast the motion of these challenging systems, capable of both bounded and unbounded motion. It is also evident that HNN can yield an energy surface that closely matches the surface generated by the true Hamiltonian function. Further we observe that the relative energy error for HNN decreases as a power law with number of training pairs, with HNN clearly outperforming conventional neural networks quantitatively.},
  number={2},
  journal={IEICE Nonlinear Theory and Its Applications},
  author={Miller, Scott T. and Lindner, John F. and Choudhary, Anshul and Sinha, Sudeshna and Ditto, William L.},
  year={2021},
  pages={134–142}
}