@article{hollis_moore_wilson_clark_2024, title={From FMECA to Decision: A Fully Bayesian Reliability Process}, volume={29}, ISSN={["2163-2758"]}, DOI={10.5711/1082598329145}, number={1}, journal={MILITARY OPERATIONS RESEARCH}, author={Hollis, Andrew N. and Moore, Timothy A. and Wilson, Alyson G. and Clark, Nicholas J.}, year={2024} } @book{nuclear war_mathematical sciences_nuclear_international security_engineering_earth_policy_sciences_2024, title={Risk Analysis Methods for Nuclear War and Nuclear Terrorism}, url={http://dx.doi.org/10.17226/27745}, DOI={10.17226/27745}, author={Nuclear War, Committee and Mathematical Sciences, Board and Nuclear and International Security, Committee and Engineering, Division and Earth, Division and Policy and Sciences, Engineering}, year={2024}, month={May} } @article{hameed_johnston_younce_tang_wilson_2023, title={Motif-Based Exploratory Data Analysis for State-Backed Platform Manipulation on Twitter}, url={http://dx.doi.org/10.1609/icwsm.v17i1.22148}, DOI={10.1609/icwsm.v17i1.22148}, abstractNote={State-backed platform manipulation (SBPM) on Twitter has been a prominent public issue since the 2016 US election cycle. Identifying and characterizing users on Twitter as belonging to a state-backed campaign is an important part of mitigating their influence. In this paper, we propose a novel time series feature grounded in social science to characterize dynamic user networks on Twitter. We introduce a classification approach, motif functional data analysis (MFDA), that captures the evolution of motifs in temporal networks, which is a useful feature for analyzing malign influence. We evaluate MFDA on data from known SBPM campaigns on Twitter and representative authentic data and compare performance to other classification methods. 
To further leverage our dynamic feature, we use the changes in network structure captured by motifs to help uncover real-world events using anomaly detection.}, journal={Proceedings of the International AAAI Conference on Web and Social Media}, author={Hameed, Khuzaima and Johnston, Rob and Younce, Brent and Tang, Minh and Wilson, Alyson}, year={2023}, month={Jun} } @book{nuclear war_mathematical sciences_nuclear_international security_engineering_earth_policy_sciences_2023, title={Risk Analysis Methods for Nuclear War and Nuclear Terrorism}, url={http://dx.doi.org/10.17226/26609}, DOI={10.17226/26609}, author={Nuclear War, Committee and Mathematical Sciences, Board and Nuclear and International Security, Committee and Engineering, Division and Earth, Division and Policy and Sciences, Engineering}, year={2023}, month={Sep} } @book{nuclear war_mathematical sciences_nuclear_international security_engineering_earth_policy_sciences_2023, title={Risk Analysis Methods for Nuclear War and Nuclear Terrorism}, url={http://dx.doi.org/10.17226/27393}, DOI={10.17226/27393}, author={Nuclear War, Committee and Mathematical Sciences, Board and Nuclear and International Security, Committee and Engineering, Division and Earth, Division and Policy and Sciences, Engineering}, year={2023}, month={Nov} } @article{bakerman_pazdernik_korkmaz_wilson_2022, title={Dynamic logistic regression and variable selection: Forecasting and contextualizing civil unrest}, volume={38}, ISSN={["1872-8200"]}, url={https://doi.org/10.1016/j.ijforecast.2021.07.003}, DOI={10.1016/j.ijforecast.2021.07.003}, abstractNote={Civil unrest can range from peaceful protest to violent furor, and researchers are working to monitor, forecast, and assess such events to allocate resources better. Twitter has become a real-time data source for forecasting civil unrest because millions of people use the platform as a social outlet. Daily word counts are used as model features, and predictive terms contextualize the reasons for the protest. To forecast civil unrest and infer the reasons for the protest, we consider the problem of Bayesian variable selection for the dynamic logistic regression model and propose using penalized credible regions to select parameters of the updated state vector. This method avoids the need for shrinkage priors, is scalable to high-dimensional dynamic data, and allows the importance of variables to vary in time as new information becomes available. A substantial improvement in both precision and F1-score using this approach is demonstrated through simulation. Finally, we apply the proposed model fitting and variable selection methodology to the problem of forecasting civil unrest in Latin America. 
Our dynamic logistic regression approach shows improved accuracy compared to the static approach currently used in event prediction and feature selection.}, number={2}, journal={INTERNATIONAL JOURNAL OF FORECASTING}, publisher={Elsevier BV}, author={Bakerman, Jordan and Pazdernik, Karl and Korkmaz, Gizem and Wilson, Alyson G.}, year={2022}, pages={648–661} } @book{emerging_hazards_in_commercial_aviation_report_1_2022, title={Emerging Hazards in Commercial Aviation—Report 1: Initial Assessment of Safety Data and Analysis Processes}, url={http://dx.doi.org/10.17226/26673}, DOI={10.17226/26673}, author={{National Academies of Sciences, Engineering, and Medicine}}, journal={Transportation Research Board}, year={2022}, month={Aug} } @article{wendelberger_gray_wilson_houborg_reich_2022, title={Multiresolution Broad Area Search: Monitoring Spatial Characteristics of Gapless Remote Sensing Data}, url={https://doi.org/10.6339/22-JDS1072}, DOI={10.6339/22-JDS1072}, abstractNote={Global earth monitoring aims to identify and characterize land cover change like construction as it occurs. Remote sensing makes it possible to collect large amounts of data in near real-time over vast geographic areas and is becoming available in increasingly fine temporal and spatial resolution. Many methods have been developed for data from a single pixel, but monitoring pixel-wise spectral measurements over time neglects spatial relationships, which become more important as change manifests in a greater number of pixels in higher resolution imagery compared to moderate resolution. Building on our previous robust online Bayesian monitoring (roboBayes) algorithm, we propose monitoring multiresolution signals based on a wavelet decomposition to capture spatial change coherence on several scales to detect change sites. Monitoring only a subset of relevant signals reduces the computational burden. The decomposition relies on gapless data; we use 3 m Planet Fusion Monitoring data. Simulations demonstrate the superiority of the spatial signals in multiresolution roboBayes (MR roboBayes) for detecting subtle changes compared to pixel-wise roboBayes. We use MR roboBayes to detect construction changes in two regions with distinct land cover and seasonal characteristics: Jacksonville, FL (USA) and Dubai (UAE). It achieves site detection with less than two thirds of the monitoring processes required for pixel-wise roboBayes at the same resolution.}, journal={Journal of Data Science}, author={Wendelberger, Laura J. and Gray, Josh M. and Wilson, Alyson G.
and Houborg, Rasmus and Reich, Brian J.}, year={2022} } @book{standards_nist_technical_programs_board_engineering_sciences_2021, title={An Assessment of Selected Divisions of the Information Technology Laboratory at the National Institute of Standards and Technology}, url={http://dx.doi.org/10.17226/26354}, DOI={10.17226/26354}, journal={National Academies Press}, author={Standards, Panel and NIST Technical Programs, Committee and Board, Laboratory Assessments and Engineering, Division and Sciences, Engineering}, year={2021}, month={Nov} } @book{data_use_mathematical_sciences_applied_board_science_higher_education_national_statistics_engineering_policy_behavioral_et_al._2021, title={Empowering the Defense Acquisition Workforce to Improve Mission Outcomes Using Data Science}, url={http://dx.doi.org/10.17226/25979}, DOI={10.17226/25979}, journal={National Academies Press}, author={Data Use, Committee and Mathematical Sciences, Board and Applied, Committee and Board, Air Force Studies and Science, Computer and Higher Education, Board and National Statistics, Committee and Engineering, Division and Policy and Behavioral, Division and et al.}, year={2021}, month={Aug} } @article{wendelberger_reich_wilson_2021, title={Multi-model penalized regression}, volume={14}, ISSN={["1932-1872"]}, DOI={10.1002/sam.11496}, abstractNote={Abstract}, number={6}, journal={STATISTICAL ANALYSIS AND DATA MINING}, author={Wendelberger, Laura J. and Reich, Brian J. and Wilson, Alyson G.}, year={2021}, month={Dec}, pages={698–722} } @article{cahoon_sanborn_wilson_2021, title={Practical reliability growth modeling}, url={https://doi.org/10.1002/qre.2822}, DOI={10.1002/qre.2822}, abstractNote={Abstract}, journal={Quality and Reliability Engineering International}, author={Cahoon, Joyce and Sanborn, Kate and Wilson, Alyson}, year={2021}, month={Nov} } @article{hollis_smith_wilson_2021, title={SURROGATE BASED MUTUAL INFORMATION APPROXIMATION AND OPTIMIZATION FOR URBAN SOURCE LOCALIZATION}, volume={11}, ISSN={["2152-5099"]}, url={http://dx.doi.org/10.1615/int.j.uncertaintyquantification.2021034400}, DOI={10.1615/Int.J.UncertaintyQuantification.2021034400}, abstractNote={The ability to efficiently and accurately localize potentially threatening nuclear radiation sources in urban environments is of critical importance to national security. Techniques have been developed to infer the location and intensity of a source using data from a configuration of radiation detectors, and the effectiveness of the source localization depends critically on how the detectors are configured. In this paper, we introduce a framework that uses surrogate models to efficiently compare and optimize different detector configurations. We compare our technique to others and demonstrate its effectiveness for selecting optimal detector configurations in the context of urban source localization.}, number={5}, journal={INTERNATIONAL JOURNAL FOR UNCERTAINTY QUANTIFICATION}, publisher={Begell House}, author={Hollis, Andrew N. and Smith, Ralph C.
and Wilson, Alyson G.}, year={2021}, pages={39–55} } @article{chakraborty_lahiri_wilson_2020, title={A STATISTICAL ANALYSIS OF NOISY CROWDSOURCED WEATHER DATA}, volume={14}, ISSN={["1932-6157"]}, DOI={10.1214/19-AOAS1290}, abstractNote={Spatial prediction of weather-elements like temperature, precipitation, and barometric pressure are generally based on satellite imagery or data collected at ground-stations. None of these data provide information at a more granular or "hyper-local" resolution. On the other hand, crowdsourced weather data, which are captured by sensors installed on mobile devices and gathered by weather-related mobile apps like WeatherSignal and AccuWeather, can serve as potential data sources for analyzing environmental processes at a hyper-local resolution. However, due to the low quality of the sensors and the non-laboratory environment, the quality of the observations in crowdsourced data is compromised. This paper describes methods to improve hyper-local spatial prediction using this varying-quality noisy crowdsourced information. We introduce a reliability metric, namely Veracity Score (VS), to assess the quality of the crowdsourced observations using a coarser, but high-quality, reference data. A VS-based methodology to analyze noisy spatial data is proposed and evaluated through extensive simulations. The merits of the proposed approach are illustrated through case studies analyzing crowdsourced daily average ambient temperature readings for one day in the contiguous United States.}, number={1}, journal={ANNALS OF APPLIED STATISTICS}, author={Chakraborty, Arnab and Lahiri, Soumendra Nath and Wilson, Alyson}, year={2020}, month={Mar}, pages={116–142} } @inbook{algorithms_in_diffraction_profile_analysis_2020, title={Algorithms in Diffraction Profile Analysis}, DOI={10.1142/11389}, abstractNote={We demonstrate that the recently developed Optimal Uncertainty Quantification (OUQ) theory, combined with recent software enabling fast global solutions of constrained non-convex optimization problems, provides a methodology for rigorous model certification, validation, and optimal design under uncertainty. In particular, we show the utility of the OUQ approach to understanding the behavior of a system that is governed by a partial differential equation -- Burgers' equation. We solve the problem of predicting shock location when we only know bounds on viscosity and on the initial conditions. Through this example, we demonstrate the potential to apply OUQ to complex physical systems, such as systems governed by coupled partial differential equations. We compare our results to those obtained using a standard Monte Carlo approach, and show that OUQ provides more accurate bounds at a lower computational cost.
We briefly discuss how to extend this approach to more complex systems, and how to integrate our approach into a more ambitious program of optimal experimental design.}, booktitle={Handbook on Big Data and Machine Learning in the Physical Sciences}, year={2020}, month={May} } @article{broughton_o'donnell_gabilondo_chung_maggard_wilson_reich_smith_jones_2020, title={Bayesian refinement of full profile diffraction patterns for uncertainty quantification}, url={http://dx.doi.org/10.1107/s0108767320099560}, DOI={10.1107/s0108767320099560}, abstractNote={Advancements in X-ray and neutron characterization instruments provide the ability to examine the structures of newly developed materials. Full profile diffraction patterns collected from these instruments can be analyzed through Rietveld refinements using least-squares fitting routines to obtain sample, structural, lattice, and instrumental parameters. An alternative statistical framework using a Bayesian analysis method can also be applied to full profile fitting in order to quantify the uncertainty in the model parameters [1]. In Bayesian inference, parameters are taken to be random variables having associated posterior distributions. This representation provides descriptive insight to the uncertainties in the parameters and the model fits. In this work, Rietveld refinements are initially performed with GSAS-II [2] on the metastable perovskite material system of (Ba1-xSnx)(Zr0.5Ti0.5)O3 (BSZT) [3]. The Bayesian refinement method is then employed to enrich the model information, specifically to provide higher fidelity uncertainty quantification on the atomic occupancies and positions. The effects of refining instrumental parameters via the Bayesian method are also investigated.}, journal={Acta Crystallographica Section A Foundations and Advances}, author={Broughton, Rachel and O'Donnell, Shaun and Gabilondo, Eric and Chung, Ching-Chang and Maggard, Paul and Wilson, Alyson and Reich, Brian and Smith, Ralph and Jones, Jacob}, year={2020}, month={Aug} } @article{lee_rathsam_wilson_2020, title={Bayesian statistical models for community annoyance survey data}, url={http://dx.doi.org/10.1121/10.0001021}, DOI={10.1121/10.0001021}, abstractNote={This paper demonstrates the use of two Bayesian statistical models to analyze single-event sonic boom exposure and human annoyance data from community response surveys. Each model is fit to data from a NASA pilot study. Unlike many community noise surveys, this study used a panel sample to collect multiple observations per participant instead of a single observation. Thus, a multilevel (also known as hierarchical or mixed-effects) model is used to account for the within-subject correlation in the panel sample data. This paper describes a multilevel logistic regression model and a multilevel ordinal regression model. The paper also proposes a method for calculating a summary dose-response curve from the multilevel models that represents the population. The two models' summary dose-response curves are visually similar.
However, their estimates differ when calculating the noise dose at a fixed percent highly annoyed.}, journal={The Journal of the Acoustical Society of America}, author={Lee, Jasme and Rathsam, Jonathan and Wilson, Alyson}, year={2020}, month={Apr} } @book{mathematical sciences_engineering_sciences_2020, title={Improving Defense Acquisition Workforce Capability in Data Use}, url={http://dx.doi.org/10.17226/25922}, DOI={10.17226/25922}, journal={National Academies Press}, author={Mathematical Sciences, Board and Engineering, Division and Sciences, Engineering}, year={2020}, month={Oct} } @article{durodoye_gumpertz_wilson_griffith_ahmad_2020, title={Tenure and Promotion Outcomes at Four Large Land Grant Universities: Examining the Role of Gender, Race, and Academic Discipline}, volume={61}, ISSN={["1573-188X"]}, DOI={10.1007/s11162-019-09573-9}, number={5}, journal={RESEARCH IN HIGHER EDUCATION}, author={Durodoye, Raifu, Jr. and Gumpertz, Marcia and Wilson, Alyson and Griffith, Emily and Ahmad, Seher}, year={2020}, month={Aug}, pages={628–651} } @article{gilman_fronczyk_wilson_2019, title={Bayesian modeling and test planning for multiphase reliability assessment}, volume={35}, ISBN={1099-1638}, DOI={10.1002/qre.2406}, abstractNote={Abstract}, number={3}, journal={QUALITY AND RELIABILITY ENGINEERING INTERNATIONAL}, author={Gilman, James F. and Fronczyk, Kassandra M. and Wilson, Alyson G.}, year={2019}, month={Apr}, pages={750–760} } @article{tian_bondell_wilson_2019, title={Bayesian variable selection for logistic regression}, volume={12}, ISSN={["1932-1872"]}, DOI={10.1002/sam.11428}, abstractNote={Abstract}, number={5}, journal={STATISTICAL ANALYSIS AND DATA MINING}, author={Tian, Yiqing and Bondell, Howard D. and Wilson, Alyson}, year={2019}, month={Oct}, pages={378–393} } @article{typhina_wilson_2019, title={Discussion on “Effective interdisciplinary collaboration between statisticians and other subject matter experts”}, volume={31}, url={https://doi.org/10.1080/08982112.2018.1539233}, DOI={10.1080/08982112.2018.1539233}, abstractNote={Abstract Anderson-Cook, Lu, and Parker’s article offers numerous suggestions for ways statisticians can facilitate effective interdisciplinary collaboration, with particular focus on project teams. Their article comes at a time when the importance of collaboration to support innovation is becoming more broadly recognized, bringing with it the inherent challenges of engaging in collaboration. In our discussion, we expand on Anderson-Cook et al.’s insights by describing our experiences working with collaborators from different disciplines and sectors. 
We contextualize our recommendations with examples of collaborations from our organization, the Laboratory for Analytic Sciences.}, number={1}, journal={Quality Engineering}, publisher={Informa UK Limited}, author={Typhina, Eli and Wilson, Alyson}, year={2019}, month={Jan}, pages={192–194} } @article{wilson_schmidt_schmidt_winter_2019, title={Immersive Collaboration on Data Science for Intelligence Analysis}, url={http://dx.doi.org/10.1162/99608f92.4a9eef8d}, DOI={10.1162/99608f92.4a9eef8d}, abstractNote={founded the Laboratory for Analytic Sciences (LAS) at North Carolina State University (NCSU) to help the Intelligence Community (IC) address the growing complexity of big data challenges.The goal of LAS is to partner experts and practitioners from academia, government, and industry to create tools and techniques that help intelligence analysts provide better information to the decision makers who need it.}, journal={Harvard Data Science Review}, author={Wilson, Alyson and Schmidt, Matthew and Schmidt, Lara and Winter, Brent}, year={2019}, month={Nov} } @article{gasior_wagner_cores_caspar_wilson_bhattacharya_hauck_2019, title={The role of cellular contact and TGF-beta signaling in the activation of the epithelial mesenchymal transition (EMT)}, volume={13}, ISSN={["1933-6926"]}, url={https://doi.org/10.1080/19336918.2018.1526597}, DOI={10.1080/19336918.2018.1526597}, abstractNote={ABSTRACT The epithelial mesenchymal transition (EMT) is one step in the process through which carcinoma cells metastasize by gaining the cellular mobility associated with mesenchymal cells. This work examines the dual influence of the TGF-β pathway and intercellular contact on the activation of EMT in colon (SW480) and breast (MCF7) carcinoma cells. While the SW480 population revealed an intermediate state between the epithelial and mesenchymal states, the MC7 cells exhibited highly adhesive behavior. However, for both cell lines, an exogenous TGF-β signal and a reduction in cellular confluence can push a subgroup of the population towards the mesenchymal phenotype. Together, these results highlight that, while EMT is induced by the synergy of multiple signals, this activation varies across cell types.}, number={1}, journal={CELL ADHESION & MIGRATION}, publisher={Informa UK Limited}, author={Gasior, Kelsey and Wagner, Nikki J. and Cores, Jhon and Caspar, Rose and Wilson, Alyson and Bhattacharya, Sudin and Hauck, Marlene L.}, year={2019}, pages={63–75} } @article{jones_broughton_iamsasri_fancher_wilson_reich_smith_2019, title={The use of Bayesian inference in the characterization of materials and thin films}, volume={75}, ISSN={["2053-2733"]}, DOI={10.1107/S0108767319097940}, journal={ACTA CRYSTALLOGRAPHICA A-FOUNDATION AND ADVANCES}, author={Jones, Jacob L. and Broughton, Rachel and Iamsasri, Thanakorn and Fancher, Chris M. and Wilson, Alyson G. and Reich, Brian and Smith, Ralph C.}, year={2019}, pages={A211–A211} } @inbook{paterson_reich_smith_wilson_jones_2018, title={Bayesian Approaches to Uncertainty Quantification and Structure Refinement from X-Ray Diffraction}, ISBN={9783319994642 9783319994659}, ISSN={0933-033X 2196-2812}, url={http://dx.doi.org/10.1007/978-3-319-99465-9_4}, DOI={10.1007/978-3-319-99465-9_4}, abstractNote={This chapter introduces classical frequentist and Bayesian inference applied to analyzing diffraction profiles, and the methods are compared and contrasted. 
The methods are applied to both the modelling of single diffraction profiles and the full profile refinement of crystallographic structures. In the Bayesian method, Markov chain Monte Carlo algorithms are used to sample the distribution of model parameters, allowing for the construction of posterior probability distributions, which provide both parameter estimates and quantifiable uncertainties. We present the application of this method to single peak fitting in lead zirconate titanate, and the crystal structure refinement of a National Institute of Standards and Technology silicon standard reference material.}, booktitle={Materials Discovery and Design}, publisher={Springer International Publishing}, author={Paterson, Alisa R. and Reich, Brian J. and Smith, Ralph C. and Wilson, Alyson G. and Jones, Jacob L.}, year={2018}, pages={81–102} } @article{rendon_wilson_stegall_2018, title={Is it "Fake News'? Intelligence Community expertise and news dissemination as measurements for media reliability}, volume={33}, ISSN={["1743-9019"]}, DOI={10.1080/02684527.2018.1507381}, abstractNote={ABSTRACT Self-communication platforms have generated a myriad of outlets and news producers that represent a challenge for modern societies. Therefore, it is relevant to explore new measurements that can help understand whether a specific outlet disseminating news could be considered reliable or not. This study is based on the expertise from the U.S. Intelligence Community to offer a statistical model that replicates the reliability measurements based on intelligence expertise. The results suggest that a classification algorithm could be useful to measure news media reliability. Additionally, different variables were identified to predict perceptions of media reliability.}, number={7}, journal={INTELLIGENCE AND NATIONAL SECURITY}, author={Rendon, Hector and Wilson, Alyson and Stegall, Jared}, year={2018}, pages={1040–1052} } @book{behavioral_behavioral_sciences_2018, title={Learning from the Science of Cognition and Perception for Decision Making}, url={http://dx.doi.org/10.17226/25118}, DOI={10.17226/25118}, journal={National Academies Press}, author={Behavioral, Cognitive and Behavioral, Division and Sciences, Engineering}, year={2018}, month={Jun} } @article{zoh_wilson_vander wiel_lawrence_2018, title={The negative log-gamma prior distribution for Bayesian assessment of system reliability}, volume={232}, ISSN={["1748-0078"]}, DOI={10.1177/1748006x17692154}, abstractNote={ This paper presents the negative log-gamma distribution as a prior distribution useful for Bayesian assessment of system reliability. When the scale parameter is held fixed, the negative log-gamma distribution is closed under products, making it convenient for specifying priors for series systems. In particular, for series systems, negative log-gamma component priors can be specified to give an exact desired system prior and vice versa. We consider pass/fail data at the system and component levels for both static and time-varying data collection schemes and propose two new prior distributions for analyzing time-varying reliability. Finally, we consider an application of the negative log-gamma to a missile reliability problem and illustrate diagnostics useful for developing the priors. 
}, number={3}, journal={PROCEEDINGS OF THE INSTITUTION OF MECHANICAL ENGINEERS PART O-JOURNAL OF RISK AND RELIABILITY}, author={Zoh, Roger and Wilson, Alyson and Vander Wiel, Scott and Lawrence, Earl}, year={2018}, month={Jun}, pages={308–319} } @article{bakerman_pazdernik_wilson_fairchild_bahran_2018, title={Twitter geolocation: A hybrid approach}, volume={12}, number={3}, url={http://dx.doi.org/10.1145/3178112}, DOI={10.1145/3178112}, abstractNote={Geotagging Twitter messages is an important tool for event detection and enrichment. Despite the availability of both social media content and user network information, these two features are generally utilized separately in the methodology. In this article, we create a hybrid method that uses Twitter content and network information jointly as model features. We use Gaussian mixture models to map the raw spatial distribution of the model features to a predicted field. This approach is scalable to large datasets and provides a natural representation of model confidence. Our method is tested against other approaches and we achieve greater prediction accuracy. The model also improves both precision and coverage.}, journal={ACM Transactions on Knowledge Discovery from Data}, author={Bakerman, Jordan and Pazdernik, Karl and Wilson, Alyson and Fairchild, Geoffrey and Bahran, Rian}, year={2018}, month={Jun} } @inproceedings{hierarchical_bayesian_modeling_of_atomic_structural_disorder_2017, title={Hierarchical Bayesian modeling of atomic structural disorder}, booktitle={Proceedings of the International Conference on Mathematics and Computational Methods Applied to Nuclear Science and Engineering}, year={2017}, month={Apr} } @article{iamsasri_guerrier_esteves_fancher_wilson_smith_paisley_johnson-wilke_ihlefeld_bassiri-gharb_et al._2017, title={A Bayesian approach to modeling diffraction profiles and application to ferroelectric materials}, volume={50}, ISSN={1600-5767}, url={http://dx.doi.org/10.1107/S1600576716020057}, DOI={10.1107/s1600576716020057}, abstractNote={A new statistical approach for modeling diffraction profiles is introduced, using Bayesian inference and a Markov chain Monte Carlo (MCMC) algorithm. This method is demonstrated by modeling the degenerate reflections during application of an electric field to two different ferroelectric materials: thin-film lead zirconate titanate (PZT) of composition PbZr0.3Ti0.7O3 and a bulk commercial PZT polycrystalline ferroelectric. The new method offers a unique uncertainty quantification of the model parameters that can be readily propagated into new calculated parameters.}, number={1}, journal={Journal of Applied Crystallography}, publisher={International Union of Crystallography (IUCr)}, author={Iamsasri, Thanakorn and Guerrier, Jonathon and Esteves, Giovanni and Fancher, Chris M. and Wilson, Alyson G. and Smith, Ralph C. and Paisley, Elizabeth A. and Johnson-Wilke, Raegan and Ihlefeld, Jon F. and Bassiri-Gharb, Nazanin and et al.}, year={2017}, month={Feb}, pages={211–220} } @article{weaver_hamada_wilson_bakerman_2017, title={Bayesian assurance tests for degradation data}, volume={33}, ISSN={0748-8017}, url={http://dx.doi.org/10.1002/QRE.2228}, DOI={10.1002/QRE.2228}, abstractNote={Abstract}, number={8}, journal={Quality and Reliability Engineering International}, publisher={Wiley}, author={Weaver, B.P.
and Hamada, M.S. and Wilson, A.G. and Bakerman, J.E.}, year={2017}, month={Oct}, pages={2699–2709} } @article{wilson_fronczyk_2017, title={Bayesian reliability: Combining information}, volume={29}, number={1}, journal={Quality Engineering}, author={Wilson, A. G. and Fronczyk, K. M.}, year={2017}, pages={119–129} } @misc{wilson_fronczyk_2017, title={National Security Risk Analysis}, url={http://dx.doi.org/10.1002/9781118445112.stat07971}, DOI={10.1002/9781118445112.stat07971}, abstractNote={Abstract Risk analysis provides a scientific basis for making decisions in the presence of uncertainty. It is an essential tool to identify, manage, communicate, and assess risks. Each of these steps is complex and requires careful thought. Risk assessment of rare events, particularly in the reliability arena, and/or decisions against an intelligent adversary are of significant importance in the national security context. For a quantitative assessment, there are a multitude of proposed frameworks in the literature. Given the need to leverage multiple sources of information and the general sparsity of data, these methods primarily follow the Bayesian paradigm. A brief introduction to the Bayesian approach is provided, along with notes and considerations for several quantitative risk assessment techniques.}, journal={Wiley StatsRef: Statistics Reference Online}, author={Wilson, Alyson G. and Fronczyk, Kassandra M.}, year={2017}, month={Nov} } @article{gumpertz_durodoye_griffith_wilson_2017, title={Retention and promotion of women and underrepresented minority faculty in science and engineering at four large land grant institutions}, url={http://dx.doi.org/10.1371/journal.pone.0187285}, DOI={10.1371/journal.pone.0187285}, abstractNote={In the most recent cohort, 2002-2015, the experiences of men and women differed substantially among STEM disciplines. Female assistant professors were more likely than men to leave the institution and to leave without tenure in engineering, but not in the agricultural, biological and biomedical sciences and natural resources or physical and mathematical sciences. In contrast, the median times to promotion from associate to full professor were similar for women and men in engineering and the physical and mathematical sciences, but one to two years longer for women than men in the agricultural, biological and biomedical sciences and natural resources.URM faculty hiring is increasing, but is well below the proportions earning doctoral degrees in STEM disciplines. The results are variable and because of the small numbers of URM faculty, the precision and power for comparing URM faculty to other faculty were low. In three of the four institutions, lower fractions of URM faculty than other faculty hired in the 2002-2006 time frame left without tenure. Also, in the biological and biomedical and physical and mathematical sciences no URM faculty left without tenure. On the other hand, at two of the institutions, significantly more URM faculty left before their tenth anniversary than other faculty and in engineering significantly more URM faculty than other faculty left before their tenth anniversary. 
We did not find significant differences in promotion patterns between URM and other faculty.}, journal={PLOS ONE}, author={Gumpertz, Marcia and Durodoye, Raifu and Griffith, Emily and Wilson, Alyson}, year={2017}, month={Nov} } @article{zhang_wilson_2017, title={System Reliability and Component Importance Under Dependence: A Copula Approach}, volume={59}, ISSN={["1537-2723"]}, DOI={10.1080/00401706.2016.1142907}, abstractNote={ABSTRACT System reliability and component importance are of great interest in reliability modeling, especially when the components within the system are dependent. We characterize the influence of dependence structures on system reliability and component importance in coherent systems with discrete marginal distributions. The effects of dependence are captured through copula theory. We extend our framework to coherent multi-state systems. Applications of the derived results are demonstrated using a Gaussian copula, which yields simple interpretations. Simulations and two examples are presented to demonstrate the importance of modeling dependence when estimating system reliability and ranking of component importance. Proofs, algorithms, code, and data are provided in supplementary materials available online.}, number={2}, journal={TECHNOMETRICS}, author={Zhang, Xiang and Wilson, Alyson}, year={2017}, pages={215–224} } @article{a_bayesian_approach_to_evaluation_of_operational_testing_of_land_warfare_systems_2016, title={A Bayesian Approach to Evaluation of Operational Testing of Land Warfare Systems}, DOI={10.5711/1082598321423}, journal={Military Operations Research}, year={2016}, month={Jan} } @article{wilson_fronczyk_2016, title={Bayesian Reliability: Combining Information}, volume={8}, ISSN={0898-2112 1532-4222}, url={http://dx.doi.org/10.1080/08982112.2016.1211889}, DOI={10.1080/08982112.2016.1211889}, abstractNote={ABSTRACT One of the most powerful features of Bayesian analyses is the ability to combine multiple sources of information in a principled way to perform inference. This feature can be particularly valuable in assessing the reliability of systems where testing is limited. At their most basic, Bayesian methods for reliability develop informative prior distributions using expert judgment or similar systems. Appropriate models allow the incorporation of many other sources of information, including historical data, information from similar systems, and computer models. We introduce the Bayesian approach to reliability using several examples and point to open problems and areas for future work.}, journal={Quality Engineering}, publisher={Informa UK Limited}, author={Wilson, Alyson G. and Fronczyk, Kassandra M.}, year={2016}, month={Aug} } @inproceedings{lenhardt_conway_scott_blanton_krishnamurthy_hadzikadic_vouk_wilson_2016, title={Cross-institutional research cyberinfrastructure for data intensive science}, DOI={10.1109/hpec.2016.7761597}, abstractNote={This paper describes a multi-institution effort to develop a “data science as a service” platform. This platform integrates advanced federated data management for small to large datasets, access to high performance computing, distributed computing and advanced networking. The goal is to develop a platform that is flexible and extensible while still supporting domain research and avoiding the walled garden problem. Some preliminary lessons learned and next steps will also be outlined.}, booktitle={2016 ieee high performance extreme computing conference (hpec)}, author={Lenhardt, W. C. and Conway, M. and Scott, E. and Blanton, B. and Krishnamurthy, A. and Hadzikadic, M. and Vouk, M.
and Wilson, Alyson}, year={2016} } @article{graham_rotroff_marvel_buse_havener_wilson_wagner_motsinger-reif_2016, title={Incorporating Concomitant Medications into Genome-Wide Analyses for the Study of Complex Disease and Drug Response}, volume={7}, url={http://dx.doi.org/10.3389/fgene.2016.00138}, DOI={10.3389/fgene.2016.00138}, abstractNote={Given the high costs of conducting a drug-response trial, researchers are now aiming to use retrospective analyses to conduct genome-wide association studies (GWAS) to identify underlying genetic contributions to drug-response variation. To prevent confounding results from a GWAS to investigate drug response, it is necessary to account for concomitant medications, defined as any medication taken concurrently with the primary medication being investigated. We use data from the Action to Control Cardiovascular Disease (ACCORD) trial in order to implement a novel scoring procedure for incorporating concomitant medication information into a linear regression model in preparation for GWAS. In order to accomplish this, two primary medications were selected: thiazolidinediones and metformin because of the wide-spread use of these medications and large sample sizes available within the ACCORD trial. A third medication, fenofibrate, along with a known confounding medication, statin, were chosen as a proof-of-principle for the scoring procedure. Previous studies have identified SNP rs7412 as being associated with statin response. Here we hypothesize that including the score for statin as a covariate in the GWAS model will correct for confounding of statin and yield a change in association at rs7412. The response of the confounded signal was successfully diminished from p=3.19*10-7 to p=1.76*10-5, by accounting for statin using the scoring procedure presented here. This approach provides the ability for researchers to account for concomitant medications in complex trial designs where monotherapy treatment regimens are not available.}, journal={Frontiers in Genetics}, author={Graham, Hillary T. and Rotroff, Daniel M. and Marvel, Skylar W. and Buse, John B. and Havener, Tammy M. and Wilson, Alyson G. and Wagner, Michael J. and Motsinger-Reif, Alison A.}, year={2016}, month={Aug} } @article{fancher_han_levin_page_reich_smith_wilson_jones_2016, title={Use of Bayesian Inference in Crystallographic Structure Refinement via Full Diffraction Profile Analysis}, volume={6}, ISSN={2045-2322}, url={http://dx.doi.org/10.1038/srep31625}, DOI={10.1038/srep31625}, abstractNote={Abstract}, number={1}, journal={Scientific Reports}, publisher={Springer Science and Business Media LLC}, author={Fancher, Chris M. and Han, Zhen and Levin, Igor and Page, Katharine and Reich, Brian J. and Smith, Ralph C. and Wilson, Alyson G. and Jones, Jacob L.}, year={2016}, month={Aug}, pages={31625} } @article{stracuzzi_brost_phillips_robinson_wilson_woodbridge_2015, title={Computing quality scores and uncertainty for approximate pattern matching in geospatial semantic graphs}, volume={8}, ISSN={["1932-1872"]}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84944877405&partnerID=MN8TOARS}, DOI={10.1002/sam.11294}, abstractNote={Abstract}, number={5-6}, journal={STATISTICAL ANALYSIS AND DATA MINING}, author={Stracuzzi, David J. and Brost, Randy C. and Phillips, Cynthia A. and Robinson, David G. and Wilson, Alyson G. and Woodbridge, Diane M. 
-K.}, year={2015}, pages={340–352} } @article{steiner_dickinson_freeman_simpson_wilson_2015, title={Statistical Methods for Combining Information: Stryker Family of Vehicles Reliability Case Study}, volume={47}, url={http://dx.doi.org/10.1080/00224065.2015.11918142}, DOI={10.1080/00224065.2015.11918142}, abstractNote={Problem: Reliability is an essential element in assessing the operational suitability of Department of Defense weapon systems. Reliability takes a prominent role in both the design and analysis of operational tests. In the current era of reduced budgets and increased reliability requirements, it is challenging to verify reliability requirements in a single test. Furthermore, all available data should be considered in order to ensure evaluations provide the most appropriate analysis of the system's reliability. Approach: This paper describes the benefits of using parametric statistical models to combine information across multiple testing events. Both frequentist and Bayesian inference techniques are employed and they are compared and contrasted to illustrate different statistical methods for combining information. We apply these methods to data collected during the developmental and operational test phases for the Stryker family of vehicles. Results: We show that, when we combine the available information across two test phases for the Stryker family of vehicles, reliability estimates are more accurate and precise than those reported previously using traditional methods that use only operational test data in their reliability assessments.}, number={4}, journal={Journal of Quality Technology}, author={Steiner, Stefan and Dickinson, Rebecca M. and Freeman, Laura J. and Simpson, Bruce A. and Wilson, Alyson G.}, year={2015}, month={Oct}, pages={400–415} } @article{berry_fostvedt_nordman_phillips_seshadhri_wilson_2015, title={WHY DO SIMPLE ALGORITHMS FOR TRIANGLE ENUMERATION WORK IN THE REAL WORLD?}, volume={11}, ISSN={["1944-9488"]}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84953870563&partnerID=MN8TOARS}, DOI={10.1080/15427951.2015.1037030}, abstractNote={Listing all triangles is a fundamental graph operation.Triangles can have important interpretations in real-world graphs, especially social and other interaction networks.Despite the lack of provably efficient (linear, or slightly super linear) worst-case algorithms for this problem, practitioners run simple, efficient heuristics to find all triangles in graphs with millions of vertices.How are these heuristics exploiting the structure of these special graphs to provide major speedups in running time?We study one of the most prevalent algorithms used by practitioners.A trivial algorithm enumerates all paths of length 2, and checks if each such path is incident to a triangle.A good heuristic is to enumerate only those paths of length 2 in which the middle vertex has the lowest degree.It is easily implemented and is empirically known to give remarkable speedups over the trivial algorithm.We study the behavior of this algorithm over graphs with heavy-tailed degree distributions, a defining feature of real-world graphs.The erased configuration model (ECM) efficiently generates a graph with asymptotically (almost) any desired degree sequence.We show that the expected running time of this algorithm over the distribution of graphs created by the ECM is controlled by the 4/3 -norm of the degree sequence.Norms of the degree sequence are a measure of the heaviness of the tail, and it is precisely this feature that allows 
non trivial speedups of simple triangle enumeration algorithms.As a corollary of our main theorem, we prove expected linear-time performance for degree sequences following a power law with exponent α ≥ 7/3, and non trivial speedup whenever α ∈ (2, 3).}, number={6}, journal={INTERNET MATHEMATICS}, author={Berry, Jonathan W. and Fostvedt, Luke A. and Nordman, Daniel J. and Phillips, Cynthia A. and Seshadhri, C. and Wilson, Alyson G.}, year={2015}, month={Nov}, pages={555–571} } @article{casleton_beyler_genschel_wilson_2014, title={A Pilot Study Teaching Metrology in an Introductory Statistics Course}, volume={22}, ISSN={1069-1898}, url={http://dx.doi.org/10.1080/10691898.2014.11889710}, DOI={10.1080/10691898.2014.11889710}, abstractNote={Undergraduate students who have just completed an introductory statistics course often lack deep understanding of variability and enthusiasm for the field of statistics. This paper argues that by introducing the commonly underemphasized concept of measurement error, students will have a better chance of attaining both. We further present lecture materials and activities that introduce metrology, the science of measurement, which were developed and tested in a pilot study at Iowa State University. These materials explain how to characterize sources of variability in a dataset, in a way that is natural and accessible because the sources of variability are observable. Everyday examples of measurements, such as the amount of gasoline pumped into a car, are presented, and the consequences of variability within those measurements are discussed. To gauge the success of the material, students' initial and subsequent understanding of variability and their attitude toward the usefulness of statistics were analyzed in a comparative study. Questions from the CAOS and ARTIST assessments that pertain to using variability to make comparisons, understanding the standard deviation, and using graphical representations of variability were included in the assessment. The results of the comparative study indicate that most students who were exposed to the material improved their understanding of variability and had a greater appreciation of the value of statistics.}, number={3}, journal={Journal of Statistics Education}, publisher={Informa UK Limited}, author={Casleton, Emily and Beyler, Amy and Genschel, Ulrike and Wilson, Alyson}, year={2014}, month={Nov} } @article{hamada_wilson_weaver_griffiths_martz_2014, title={Bayesian Binomial Assurance Tests for System Reliability Using Component Data}, volume={46}, ISSN={["0022-4065"]}, url={http://dx.doi.org/10.1080/00224065.2014.11917952}, DOI={10.1080/00224065.2014.11917952}, abstractNote={This paper illustrates the development of Bayesian assurance test plans for system reliability assuming that binomial data will be collected on the system and that previous information is available from component testing. The posterior consumer's and producer's risks are used as the criteria for developing the test plan. Using the previous component information reduces the number of tests needed to achieve the same levels of risk. The proposed methodology is illustrated with two examples.}, number={1}, journal={JOURNAL OF QUALITY TECHNOLOGY}, author={Hamada, M. S. and Wilson, A. G. and Weaver, B. P. and Griffiths, R. W. and Martz, H. 
F.}, year={2014}, month={Jan}, pages={24–32} } @book{complex_operational_decision_making_in_networked_systems_of_humans_and_machines_2014, title={Complex Operational Decision Making in Networked Systems of Humans and Machines}, url={http://dx.doi.org/10.17226/18844}, DOI={10.17226/18844}, journal={National Academies Press}, year={2014}, month={Jun} } @article{reese_wilson_2014, title={Discussion of 'Methods for planning repeated measures accelerated degradation tests'}, volume={30}, ISSN={["1526-4025"]}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84919762822&partnerID=MN8TOARS}, DOI={10.1002/asmb.2090}, number={6}, journal={APPLIED STOCHASTIC MODELS IN BUSINESS AND INDUSTRY}, author={Reese, C. Shane and Wilson, Alyson G.}, year={2014}, pages={674–676} } @book{berry_leung_phillips_pinar_robinson_berger-wolf_bhowmick_casleton_kaiser_nordman_et al._2014, title={Statistically significant relational data mining}, url={https://www.osti.gov/biblio/1204082}, DOI={10.2172/1204082}, author={Berry, Jonathan W. and Leung, Vitus Joseph and Phillips, Cynthia Ann and Pinar, Ali and Robinson, David Gerald and Berger-Wolf, Tanya and Bhowmick, Sanjukta and Casleton, Emily and Kaiser, Mark and Nordman, Daniel J. and et al.}, year={2014}, month={Feb} } @inproceedings{berry_fostvedt_nordman_phillips_seshadhri_wilson_2014, title={Why do simple algorithms for triangle enumeration work in the real world?}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84893281676&partnerID=MN8TOARS}, DOI={10.1145/2554797.2554819}, abstractNote={Triangle enumeration is a fundamental graph operation. Despite the lack of provably efficient (linear, or slightly super-linear) worst-case algorithms for this problem, practitioners run simple, efficient heuristics to find all triangles in graphs with millions of vertices. How are these heuristics exploiting the structure of these special graphs to provide major speedups in running time? We study one of the most prevalent algorithms used by practitioners. A trivial algorithm enumerates all paths of length 2, and checks if each such path is incident to a triangle. A good heuristic is to enumerate only those paths of length 2 where the middle vertex has the lowest degree. It is easily implemented and is empirically known to give remarkable speedups over the trivial algorithm. We study the behavior of this algorithm over graphs with heavy-tailed degree distributions, a defining feature of real-world graphs. The erased configuration model (ECM) efficiently generates a graph with asymptotically (almost) any desired degree sequence. We show that the expected running time of this algorithm over the distribution of graphs created by the ECM is controlled by the l4/3-norm of the degree sequence. As a corollary of our main theorem, we prove expected linear-time performance for degree sequences following a power law with exponent α ≥ 7/3, and non-trivial speedup whenever α ∈ (2,3).}, booktitle={ITCS 2014 - Proceedings of the 2014 Conference on Innovations in Theoretical Computer Science}, author={Berry, J.W. and Fostvedt, L.K. and Nordman, D.J. and Phillips, C.A. and Seshadhri, C. and Wilson, A.G.}, year={2014}, pages={225–234} } @article{wendelberger_wilson_stinnett_gaydos_2014, title={Working in Interdisciplinary Teams}, url={http://dx.doi.org/10.1080/09332480.2014.988955}, DOI={10.1080/09332480.2014.988955}, abstractNote={Many scientists are drawn to statistics and biostatistics by their passion to make a difference using their skills in mathematics to tackle applied problems.
We discuss a variety of aspects of work...}, journal={CHANCE}, author={Wendelberger, Joanne and Wilson, Alyson and Stinnett, Sandra and Gaydos, Brenda}, year={2014}, month={Oct} } @article{guo_wilson_2013, title={Bayesian Methods for Estimating System Reliability Using Heterogeneous Multilevel Information}, volume={55}, ISSN={["1537-2723"]}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84890052261&partnerID=MN8TOARS}, DOI={10.1080/00401706.2013.804441}, abstractNote={We propose a Bayesian approach for assessing the reliability of multicomponent systems. Our models allow us to evaluate system, subsystem, and component reliability using multilevel information. Data are collected over time, and include binary, lifetime, and degradation data. We illustrate the methodology through two examples and discuss extensions. Supplementary materials are available online.}, number={4}, journal={TECHNOMETRICS}, author={Guo, Jiqiang and Wilson, Alyson G.}, year={2013}, month={Nov}, pages={461–472} } @article{guo_nordman_wilson_2013, title={Bayesian nonparametric models for community detection}, volume={55}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84890066326&partnerID=MN8TOARS}, DOI={10.1080/00401706.2013.804438}, abstractNote={We propose a series of Bayesian nonparametric statistical models for community detection in graphs. We model the probability of the presence or absence of edges within the graph. Using these models, we naturally incorporate uncertainty and variability and take advantage of nonparametric techniques, such as the Chinese restaurant process and the Dirichlet process. Some of the contributions include: (a) the community structure is directly modeled without specifying the number of communities a priori; (b) the probabilities of edges within or between communities may be modeled as varying by community or pairs of communities; (c) some nodes can be classified as not belonging to any community; and (d) Bayesian model diagnostics are used to compare models and help with appropriate model selection. We start by fitting an initial model to a well-known network dataset, and we develop a series of increasingly complex models. We propose Markov chain Monte Carlo algorithms to carry out the estimation as well as an approach for community detection using the posterior distributions under a decision theoretical framework. Bayesian nonparametric techniques allow us to estimate the number and structure of communities from the data. To evaluate the proposed models for the example dataset, we discuss model comparison using the deviance information criterion and model checking using posterior predictive distributions. Supplementary materials are available online.}, number={4}, journal={Technometrics}, author={Guo, J. and Nordman, D.J. and Wilson, Alyson}, year={2013}, pages={390–402} } @article{weaver_hamada_vardeman_wilson_2012, title={A Bayesian approach to the analysis of gauge R&R data}, volume={24}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84867050766&partnerID=MN8TOARS}, DOI={10.1080/08982112.2012.702381}, abstractNote={ABSTRACT Gauge repeatability and reproducibility (R&R) studies are used to assess precision of measurement systems. In particular, they are used to quantify the importance of various sources of variability in a measurement system. 
We take a Bayesian approach to data analysis and show how to estimate variance components associated with the sources of variability and relevant functions of these using the gauge R&R data together with prior information. We then provide worked examples of gauge R&R data analysis for types of studies common in industrial applications. With each example we provide WinBUGS code to illustrate how easy it is to implement a Bayesian analysis of gauge R&R data.}, number={4}, journal={Quality Engineering}, author={Weaver, B.P. and Hamada, M.S. and Vardeman, S.B. and Wilson, A.G.}, year={2012}, pages={486–500} } @book{assessing the reliability of complex models_2012, url={http://dx.doi.org/10.17226/13395}, DOI={10.17226/13395}, journal={National Academies Press}, year={2012}, month={Jun} } @book{industrial methods for the effective development and testing of defense systems_2012, url={http://dx.doi.org/10.17226/13291}, DOI={10.17226/13291}, journal={National Academies Press}, year={2012}, month={Jan} } @article{anderson-cook_lu_clark_dehart_hoerl_jones_mackay_montgomery_parker_simpson_et al._2012, title={Statistical engineering-forming the foundations}, volume={24}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84859806129&partnerID=MN8TOARS}, DOI={10.1080/08982112.2012.641150}, abstractNote={Editors: Christine M. Anderson-Cook, Lu Lu, Panelists: Gordon Clark, Stephanie P. DeHart, Roger Hoerl, Bradley Jones, R. Jock MacKay, Douglas Montgomery, Peter A. Parker, James Simpson, Ronald Snee, Stefan H. Steiner, Jennifer Van Mullekom, G. Geoff Vining, Alyson G. Wilson Los Alamos National Laboratory, Los Alamos, New Mexico Ohio State University, Columbus, Ohio DuPont, Roanoke, Virginia GE Global Research, Schenectady, New York SAS, Cary, North Carolina University of Waterloo, Waterloo, Ontario, Canada Arizona State University, Tempe, Arizona NASA, Langley, Virginia Eglin Air Force Base, Valparaiso, Florida Snee Associates, Newark, Delaware DuPont, Richmond, Virginia Virginia Tech, Blacksburg, Virginia Institute for Defense Analyses, Washington, DC INTRODUCTION}, number={2}, journal={Quality Engineering}, author={Anderson-Cook, C.M. and Lu, L. and Clark, G. and Dehart, S.P. and Hoerl, R. and Jones, B. and MacKay, R.J. and Montgomery, D. and Parker, P.A. and Simpson, J. and et al.}, year={2012}, pages={110–132} } @article{anderson-cook_lu_clark_dehart_hoerl_jones_mackay_montgomery_parker_simpson_et al._2012, title={Statistical engineering-roles for statisticians and the path forward}, volume={24}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84859802152&partnerID=MN8TOARS}, DOI={10.1080/08982112.2012.641151}, abstractNote={Experts from diverse areas of industry, government, and academia are asked about the changing roles for statisticians in the SE workplace and discuss some of the opportunities and challenges for the future.}, number={2}, journal={Quality Engineering}, author={Anderson-Cook, C.M. and Lu, L. and Clark, G. and Dehart, S.P. and Hoerl, R. and Jones, B. and MacKay, R.J. and Montgomery, D. and Parker, P.A. and Simpson, J. 
and et al.}, year={2012}, pages={133–152} } @book{testing of body armor materials_2012, url={http://dx.doi.org/10.17226/13390}, DOI={10.17226/13390}, journal={National Academies Press}, year={2012}, month={Jun} } @article{reese_wilson_guo_hamada_johnson_2011, title={A Bayesian Model for Integrating Multiple Sources of Lifetime Information in System-Reliability Assessments}, volume={43}, ISSN={0022-4065 2575-6230}, url={http://dx.doi.org/10.1080/00224065.2011.11917851}, DOI={10.1080/00224065.2011.11917851}, abstractNote={We present a Bayesian model for assessing the reliability of multicomponent systems. Novel features of this model are the natural manner in which lifetime data collected at either the component, subsystem, or system level are integrated with prior information at any level. The model allows pooling of information between similar components, the incorporation of expert opinion, and straightforward handling of censored data. The methodology is illustrated with two examples.}, number={2}, journal={Journal of Quality Technology}, publisher={Informa UK Limited}, author={Reese, C. Shane and Wilson, Alyson G. and Guo, Jiqiang and Hamada, Michael S. and Johnson, Valen E.}, year={2011}, month={Apr}, pages={127–141} } @article{wilson_anderson-cook_huzurbazar_2011, title={A case study for quantifying system reliability and uncertainty}, volume={96}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-79959575281&partnerID=MN8TOARS}, DOI={10.1016/j.ress.2010.09.012}, abstractNote={The ability to estimate system reliability with an appropriate measure of associated uncertainty is important for understanding its expected performance over time. Frequently, obtaining full-system data is prohibitively expensive, impractical, or not permissible. Hence, methodology which allows for the combination of different types of data at the component or subsystem levels can allow for improved estimation at the system level. We apply methodologies for aggregating uncertainty from component-level data to estimate system reliability and quantify its overall uncertainty. This paper provides a proof-of-concept that uncertainty quantification methods using Bayesian methodology can be constructed and applied to system reliability problems for a system with both series and parallel structures.}, number={9}, journal={Reliability Engineering and System Safety}, author={Wilson, A.G. and Anderson-Cook, C.M. and Huzurbazar, A.V.}, year={2011}, pages={1076–1084} } @article{wiel_graves_wilson_reese_2011, title={A random onset model for degradation of high-reliability systems}, volume={53}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-79958697393&partnerID=MN8TOARS}, DOI={10.1198/TECH.2011.09119}, abstractNote={Weapons stockpiles are expected to have high reliability over time, but prudence demands regular testing to detect detrimental aging effects and maintain confidence that reliability is high. We present a model, called RADAR, in which a stockpile has high initial reliability that may begin declining at any time. RADAR provides a framework for answering questions about how confidence in continued high reliability can change as a result of reduced sampling, discovery of failed units, and information about when a unit failed. Supplemental materials (available on the Technometrics web site) provide lemmas used in the proof of Theorem 1, details of the Markov chain Monte Carlo algorithm, and additional examples.}, number={2}, journal={Technometrics}, author={Wiel, S.V. and Graves, T. and Wilson, A. 
and Reese, S.}, year={2011}, pages={163–172} } @article{hamada_huzurbazar_vander wiel_wilson_2011, title={Assessing the risks of sampling rates for surveilling a population}, volume={23}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-79958853015&partnerID=MN8TOARS}, DOI={10.1080/08982112.2011.575747}, abstractNote={ABSTRACT Surveillance of a population, such as a weapon stockpile, is needed to discover manufacturing defects as well as deterioration as the population ages. This article considers the risks of sampling rates for surveillance from three perspectives: detection probability of defects in a proportion of a population with pass/fail data, detection of a trend in a defective proportion of the population with pass/fail data, and detection of a trend with quantitative degradation measurements. Understanding of these risks will help the decision maker choose a sampling rate to protect against such problems of a specified size at a tolerable risk.}, number={3}, journal={Quality Engineering}, author={Hamada, M.S. and Huzurbazar, A.V. and Vander Wiel, S. and Wilson, A.G.}, year={2011}, pages={242–252} } @article{lu_anderson-cook_wilson_2011, title={Choosing a consumption strategy for a population of units based on reliability}, volume={225}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84856273309&partnerID=MN8TOARS}, DOI={10.1177/1748006X11392287}, abstractNote={ Managers and decision makers are often faced with difficult decisions balancing multiple competing objectives when selecting between several strategies for how to use the units in their inventory or stockpile. This paper considers how to define different metrics which appropriately summarize the objectives of a good strategy, how to consider what impact unanticipated changes in the future might have, and how to combine several criteria into a decision when no global winner is likely. This process is discussed in the context of maximizing the reliability of a population of single-use non-repairable units, such as missiles or batteries, which are being consumed (used and removed from the population) as they age over time. }, number={4}, journal={Proceedings of the Institution of Mechanical Engineers, Part O: Journal of Risk and Reliability}, author={Lu, L. and Anderson-Cook, C.M. and Wilson, A.G.}, year={2011}, pages={407–423} } @article{anderson-cook_crowder_huzurbazar_lorio_ringland_wilson_2011, title={Quantifying reliability uncertainty from catastrophic and margin defects: A proof of concept}, volume={96}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-79959601094&partnerID=MN8TOARS}, DOI={10.1016/j.ress.2010.10.006}, abstractNote={We aim to analyze the effects of component level reliability data, including both catastrophic failures and margin failures, on system level reliability. While much work has been done to analyze margins and uncertainties at the component level, a gap exists in relating this component level analysis to the system level. We apply methodologies for aggregating uncertainty from component level data to quantify overall system uncertainty. We explore three approaches towards this goal, the classical Method of Moments (MOM), Bayesian, and Bootstrap methods. These three approaches are used to quantify the uncertainty in reliability for a system of mixed series and parallel components for which both pass/fail and continuous margin data are available. 
This paper provides proof of concept that uncertainty quantification methods can be constructed and applied to system reliability problems. In addition, application of these methods demonstrates that the results from the three fundamentally different approaches can be quite comparable.}, number={9}, journal={Reliability Engineering and System Safety}, author={Anderson-Cook, C.M. and Crowder, S. and Huzurbazar, A.V. and Lorio, J. and Ringland, J. and Wilson, A.G.}, year={2011}, pages={1063–1075} } @article{wilson_anderson-cook_2010, title={Comment}, volume={52}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-78249237164&partnerID=MN8TOARS}, DOI={10.1198/TECH.2010.09178}, abstractNote={We may interpret P(T > t | μ, (n = x = 1)) as the initial reliability of the system, and the tracking of reliability growth as the process of updating this probability subsequent to each cycle of testing and eliminating the failure-causing modes. With μ, n = x = 1, specified, this probability can be numerically calculated (or approximated by truncating the upper limits of m and i). The Ti’s of Equation (A) provide information about n and x. Since the Pi’s are generated from a common beta distribution, there is an absence of discriminatory power about the various failure modes under this setup. Were each Pi generated by its own beta distribution with ni and xi as parameters, predictions about individual mode failures would have been possible. Thus the model of the article in question is restrictive. Given Ti = ti, i = 1, . . . , we may estimate n and x via either the method of maximum likelihood or a Bayesian analysis. In either case, Equation (A) would be the basis of the likelihood function. Information about μ is provided by Equation (B) or by Equation (C), where n = x = 1. In the former case, one would plug the maximum likelihood estimates of n and x into Equation (B) and then, with T observed to be greater than some t, use this as the data to construct a likelihood to estimate μ. Once this is done, the problem of tracking reliability growth is addressed. It is not clear to this discussant how the several questions the article hopes to answer can be formally addressed, given the model of Section 2.}, number={4}, journal={Technometrics}, author={Wilson, A.G. and Anderson-Cook, C.M.}, year={2010}, pages={397–400} } @book{kegelmeyer_2010, title={Network discovery, characterization, and prediction: a grand challenge LDRD final report}, url={https://www.osti.gov/biblio/1011623}, DOI={10.2172/1011623}, abstractNote={NGC knit together previously disparate research staff and user expertise in a fashion that not only addressed our immediate research goals, but which promises to have created an enduring cultural legacy of mutual understanding, in service of Sandia's national security responsibilities in cybersecurity and counter proliferation.}, author={Kegelmeyer, W. Philip, Jr.}, year={2010}, month={Nov} } @book{testing of body armor materials for use by the u.s. army--phase ii_2010, url={http://dx.doi.org/10.17226/12885}, DOI={10.17226/12885}, journal={National Academies Press}, year={2010}, month={May} } @book{phase i report on review of the testing of body armor materials for use by the u.s.
army_2009, url={http://dx.doi.org/10.17226/12837}, DOI={10.17226/12837}, journal={National Academies Press}, year={2009}, month={Jan} } @article{singpurwalla_wilson_2009, title={Probability, chance and the probability of chance}, volume={41}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-56849107795&partnerID=MN8TOARS}, DOI={10.1080/07408170802322630}, abstractNote={In our day-to-day discourse on uncertainty, words like belief, chance, plausible, likelihood and probability are commonly encountered. Often, these words are used interchangeably, because they are intended to encapsulate some loosely articulated notions about the unknowns. The purpose of this paper is to propose a framework that is able to show how each of these terms can be made precise, so that each reflects a distinct meaning. To construct our framework, we use a basic scenario upon which caveats are introduced. Each caveat motivates us to bring in one or more of the above notions. The scenario considered here is very basic; it arises in both the biomedical context of survival analysis and the industrial context of engineering reliability. This paper is expository and much of what is said here has been said before. However, the manner in which we introduce the material via a hierarchy of caveats that could arise in practice, namely our proposed framework, is the novel aspect of this paper. To appreciate all this, we require of the reader a knowledge of the calculus of probability. However, in order to make our distinctions transparent, probability has to be interpreted subjectively, not as an objective relative frequency.}, number={1}, journal={IIE Transactions (Institute of Industrial Engineers)}, author={Singpurwalla, N.D. and Wilson, A.G.}, year={2009}, pages={12–22} } @book{diegert_dvorack_ringland_mundt_huzurbazar_lorio_fatherley_anderson-cook_wilson_zurn_2009, title={Quantifying reliability uncertainty: a proof of concept}, url={https://www.osti.gov/biblio/970305}, DOI={10.2172/970305}, abstractNote={This paper develops Classical and Bayesian methods for quantifying the uncertainty in reliability for a system of mixed series and parallel components for which both go/no-go and variables data are available. Classical methods focus on uncertainty due to sampling error. Bayesian methods can explore both sampling error and other knowledge-based uncertainties. To date, the reliability community has focused on qualitative statements about uncertainty because there was no consensus on how to quantify them. This paper provides a proof of concept that workable, meaningful quantification methods can be constructed. In addition, the application of the methods demonstrated that the results from the two fundamentally different approaches can be quite comparable. In both approaches, results are sensitive to the details of how one handles components for which no failures have been seen in relatively few tests.}, author={Diegert, Kathleen V. and Dvorack, Michael A. and Ringland, James T. and Mundt, Michael Joseph and Huzurbazar, Aparna and Lorio, John F. and Fatherley, Quinn and Anderson-Cook, Christine and Wilson, Alyson G. and Zurn, Rena M.}, year={2009}, month={Oct} } @article{parnell_borio_cox_brown_pollock_wilson_2009, title={Response to Ezell and von Winterfeldt}, volume={7}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-65349093369&partnerID=MN8TOARS}, DOI={10.1089/bsp.2009.0927},
number={1}, journal={Biosecurity and Bioterrorism}, author={Parnell, G.S. and Borio, L.L. and Cox, L.A. and Brown, G.G. and Pollock, S. and Wilson, A.G.}, year={2009}, pages={111–112} } @article{wilson_huzurbazar_sentz_2009, title={The Imprecise Dirichlet Model for Multilevel System Reliability}, volume={3}, ISSN={1559-8608 1559-8616}, url={http://dx.doi.org/10.1080/15598608.2009.10411921}, DOI={10.1080/15598608.2009.10411921}, abstractNote={In this paper we expand on recent advances in Bayesian inference for multilevel data in fault trees and Bayesian networks. As a first example, we compare the Bayesian fault tree and incomplete data approaches to statistical inference for multilevel data in fault trees. As a second example, we consider two a priori representations of uncertainty about the parameters of a Bayesian network: a multinomial-Dirichlet model and an extension of the imprecise Dirichlet model. We calculate the a posteriori uncertainty after updating with data using Markov chain Monte Carlo and compare the results.}, number={1}, journal={Journal of Statistical Theory and Practice}, publisher={Springer Science and Business Media LLC}, author={Wilson, Alyson G. and Huzurbazar, Aparna V. and Sentz, Kari}, year={2009}, month={Mar}, pages={211–223} } @book{hamada_wilson_reese_martz_2008, title={Bayesian Reliability}, url={http://dx.doi.org/10.1007/978-0-387-77950-8}, DOI={10.1007/978-0-387-77950-8}, abstractNote={Bayesian Reliability presents modern methods and techniques for analyzing reliability data from a Bayesian perspective. The adoption and application of Bayesian methods in virtually all branches of sc}, journal={Springer New York}, author={Hamada, Michael S. and Wilson, Alyson G. and Reese, C.
Shane and Martz, Harry F.}, year={2008} } @book{department of homeland security bioterrorism risk assessment_2008, url={http://dx.doi.org/10.17226/12206}, DOI={10.17226/12206}, journal={National Academies Press}, year={2008}, month={Dec} } @article{anderson-cook_graves_hengartner_klamann_wiedlea_wilson_anderson_lopez_2008, title={Reliability Modeling using Both System Test and Quality Assurance Data}, url={http://dx.doi.org/10.5711/morj.13.3.5}, DOI={10.5711/morj.13.3.5}, abstractNote={A system with several components may undergo full system pass/fail testing as well as quality assurance testing at the single component level. The component tests are informative about system reliability, although they measure different things than the full system tests. We present a Bayesian framework for integrating the two types of test data for better reliability estimates. Our formulation allows the reliability to depend on covariates such as age. One result of the inference is a better understanding of the relationship between component tests and system performance. We illustrate the ideas using a small subsystem of a larger (proprietary) system.}, journal={Military Operations Research}, author={Anderson-Cook, Christine M. and Graves, Todd and Hengartner, Nicolas and Klamann, Richard and Wiedlea, Andrew C.K. and Wilson, Alyson G. and Anderson, Greg and Lopez, George}, year={2008}, month={Jun} } @article{parnell_borio_brown_banks_wilson_2008, title={Scientists urge DHS to improve bioterrorism risk assessment}, volume={6}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-58149232416&partnerID=MN8TOARS}, DOI={10.1089/bsp.2008.0930}, abstractNote={In 2006, the Department of Homeland Security (DHS) completed its first Bioterrorism Risk Assessment (BTRA), intended to be the foundation for DHS's subsequent biennial risk assessments mandated by Homeland Security Presidential Directive 10 (HSPD-10). At the request of DHS, the National Research Council established the Committee on Methodological Improvements to the Department of Homeland Security's Biological Agent Risk Analysis to provide an independent, scientific peer review of the BTRA. The Committee found a number of shortcomings in the BTRA, including a failure to consider terrorists as intelligent adversaries in their models, unnecessary complexity in threat and consequence modeling and simulations, and a lack of focus on risk management. The Committee unanimously concluded that an improved BTRA is needed to provide a more credible foundation for risk-informed decision making.}, number={4}, journal={Biosecurity and Bioterrorism}, author={Parnell, G.S. and Borio, L.L. and Brown, G.G. and Banks, D. and Wilson, A.G.}, year={2008}, pages={353–356} } @article{anderson-cook_graves_hamada_hengartner_johnson_reese_wilson_2007, title={Bayesian Stockpile Reliability Methodology for Complex Systems}, url={http://dx.doi.org/10.5711/morj.12.2.25}, DOI={10.5711/morj.12.2.25}, journal={Military Operations Research}, author={Anderson-Cook, Christine M. and Graves, Todd and Hamada, Michael and Hengartner, Nicholas and Johnson, Valen E. and Reese, C. 
Shane and Wilson, Alyson G.}, year={2007}, month={Mar} } @article{wilson_huzurbazar_2007, title={Bayesian networks for multilevel system reliability}, volume={92}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-34250158140&partnerID=MN8TOARS}, DOI={10.1016/j.ress.2006.09.003}, abstractNote={Bayesian networks have recently found many applications in systems reliability; however, the focus has been on binary outcomes. In this paper we extend their use to multilevel discrete data and discuss how to make joint inference about all of the nodes in the network. These methods are applicable when system structures are too complex to be represented by fault trees. The methods are illustrated through four examples that are structured to clarify the scope of the problem.}, number={10}, journal={Reliability Engineering and System Safety}, author={Wilson, A.G. and Huzurbazar, A.V.}, year={2007}, pages={1413–1420} } @misc{wilson_2007, title={Hierarchical Markov Chain Monte Carlo (MCMC) for Bayesian System Reliability}, url={http://dx.doi.org/10.1002/9780470061572.eqr094}, DOI={10.1002/9780470061572.eqr094}, abstractNote={Abstract Hierarchical models are one of the central tools of Bayesian analysis. They offer many advantages, including the ability to borrow strength to estimate individual parameters and the ability to specify complex models that reflect engineering and physical realities. Markov chain Monte Carlo (MCMC) is a set of algorithms that allow Bayesian inference in a variety of models. We illustrate hierarchical models and MCMC in a Bayesian system reliability example.}, journal={Encyclopedia of Statistics in Quality and Reliability}, author={Wilson, Alyson G.}, year={2007}, month={Dec} } @article{wilson_mcnamara_wilson_2007, title={Information integration for complex systems}, volume={92}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-33748291454&partnerID=MN8TOARS}, DOI={10.1016/j.ress.2006.07.003}, abstractNote={This paper develops a framework to determine the performance or reliability of a complex system. We consider a case study in missile reliability that focuses on the assessment of a high fidelity launch vehicle intended to emulate a ballistic missile threat. In particular, we address the case of how to make a system assessment when there are limited full-system tests. We address the development of a system model and the integration of a variety of data using a Bayesian network.}, number={1}, journal={Reliability Engineering and System Safety}, author={Wilson, A.G. and McNamara, L.A. and Wilson, G.D.}, year={2007}, pages={121–130} } @article{wilson_graves_hamada_reese_2006, title={Advances in data combination, analysis and collection for system reliability assessment}, volume={21}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-34249308477&partnerID=MN8TOARS}, DOI={10.1214/088342306000000439}, abstractNote={The systems that statisticians are asked to assess, such as nuclear weapons, infrastructure networks, supercomputer codes and munitions, have become increasingly complex. It is often costly to conduct full system tests. As such, we present a review of methodology that has been proposed for addressing system reliability with limited full system testing. The first approaches presented in this paper are concerned with the combination of multiple sources of information to assess the reliability of a single component. The second general set of methodology addresses the combination of multiple levels of data to determine system reliability. 
We then present developments for complex systems beyond traditional series/parallel representations through the use of Bayesian networks and flowgraph models. We also include methodological contributions to resource allocation considerations for system reliability assessment. We illustrate each method with applications primarily encountered at Los Alamos National Laboratory.}, number={4}, journal={Statistical Science}, author={Wilson, A.G. and Graves, T.L. and Hamada, M.S. and Reese, C.S.}, year={2006}, pages={514–531} } @article{keller-mcnulty_wilson_anderson-cook_2006, title={Reliability}, volume={21}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-34249333066&partnerID=MN8TOARS}, DOI={10.1214/088342306000000664}, abstractNote={This special volume of Statistical Science presents some innovative, if not provocative, ideas in the area of reliability, or perhaps more appropriately named, integrated system assessment. In this age of exponential growth in science, engineering and technology, the capability to evaluate the performance, reliability and safety of complex systems presents new challenges. Today's methodology must respond to the ever-increasing demands for such evaluations to provide key information for decision and policy makers at all levels of government and industry, with problems ranging from international security to space exploration. We, the co-editors of this volume and the authors, believe that scientific progress in reliability assessment requires the development of processes, methods and tools that combine diverse information types (e.g., experiments, computer simulations, expert knowledge) from diverse sources (e.g., scientists, engineers, business developers, technology integrators, decision makers) to assess quantitative performance metrics that can aid decision making under uncertainty. These are highly interdisciplinary problems. The principal role of the statistical sciences is to bring statistical rigor, thinking and methodology to these problems. Bedford, Quigley and Walls open the issue by reviewing the role of expert judgment to support re-}, number={4}, journal={Statistical Science}, author={Keller-McNulty, S. and Wilson, A. and Anderson-Cook, C.}, year={2006} } @book{wilson_wilson_olwell_2006, title={Statistical methods in counterterrorism: Game theory, modeling, syndromic surveillance, and biometric authentication}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-84889962244&partnerID=MN8TOARS}, DOI={10.1007/0-387-35209-0}, journal={Statistical Methods in Counterterrorism: Game Theory, Modeling, Syndromic Surveillance, and Biometric Authentication}, author={Wilson, A.G. and Wilson, G.D. and Olwell, D.H.}, year={2006}, pages={1–292} } @inproceedings{sentz_wilson_2005, title={Fault tree uncertainty quantification using probabilities and belief structures on basic and non-basic events}, volume={2005}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-33744978352&partnerID=MN8TOARS}, DOI={10.1109/NAFIPS.2005.1548509}, abstractNote={In the vast majority of the literature on fault trees, information is only provided on the basic events and is described probabilistically. In this paper, we extend this in two ways: we consider information at both basic and non-basic events, and we describe our uncertainties about this information in terms of both probability and Dempster-Shafer evidence theory.
We develop these extensions for the AND gate.}, booktitle={Annual Conference of the North American Fuzzy Information Processing Society - NAFIPS}, author={Sentz, K. and Wilson, A.}, year={2005}, pages={65–68} } @book{wilson_limnios_keller-mcnulty_armijo_2005, title={Modern Statistical and Mathematical Methods in Reliability}, url={http://dx.doi.org/10.1142/5844}, DOI={10.1142/5844}, abstractNote={Competing Risk Modeling in Reliability (T Bedford); Game-Theoretic and Reliability Methods in Counter-Terrorism and Security (V Bier); Regression Models for Reliability Given the Usage Accumulation History (T Duchesne); Bayesian Methods for Assessing System Reliability: Models and Computation (T Graves & M Hamada); Dynamic Modeling in Reliability and Survival Analysis (E A Pena & E Slate); End of Life Analysis (H Wynn et al.); and other papers}, journal={Series on Quality, Reliability and Engineering Statistics}, author={Wilson, Alyson and Limnios, Nikolaos and Keller-McNulty, Sallie and Armijo, Yvonne}, year={2005}, month={Oct} } @article{keller-mcnulty_wilson_wilson_2005, title={The Impact of Technology on the Scientific Method}, volume={18}, url={http://dx.doi.org/10.1080/09332480.2005.10722744}, DOI={10.1080/09332480.2005.10722744}, journal={CHANCE}, author={Keller-McNulty, S. and Wilson, A. G. and Wilson, G.}, year={2005}, month={Sep} } @article{hamada_martz_reese_graves_johnson_wilson_2004, title={A fully Bayesian approach for combining multilevel failure information in fault tree quantification and optimal follow-on resource allocation}, volume={86}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-4344645794&partnerID=MN8TOARS}, DOI={10.1016/j.ress.2004.02.001}, abstractNote={This paper presents a fully Bayesian approach that simultaneously combines non-overlapping (in time) basic event and higher-level event failure data in fault tree quantification. Such higher-level data often correspond to train, subsystem or system failure events. The fully Bayesian approach also automatically propagates the highest-level data to lower levels in the fault tree. A simple example illustrates our approach. The optimal allocation of resources for collecting additional data from a choice of different level events is also presented. The optimization is achieved using a genetic algorithm.}, number={3}, journal={Reliability Engineering and System Safety}, author={Hamada, M. and Martz, H.F. and Reese, C.S. and Graves, T. and Johnson, V. and Wilson, A.G.}, year={2004}, pages={297–305} } @article{wilson_hamada_xu_2004, title={Assessing Production Quality with Nonstandard Measurement Errors}, url={http://dx.doi.org/10.1080/00224065.2004.11980265}, DOI={10.1080/00224065.2004.11980265}, abstractNote={We consider the assessment of a manufacturing process's performance when a sample of parts produced by the process is measured with error. When the measurement error variance depends on the true characteristic of the part being measured, nonstandard variance components models are needed. We consider a Bayesian approach, showing how this methodology can be used to calculate tolerance intervals for the part distribution to assess the manufacturing process's performance and to determine other important quantities such as release specifications.
In addition, we show how to handle censored data.}, journal={Journal of Quality Technology}, author={Wilson, Alyson and Hamada, Michael and Xu, Meng}, year={2004}, month={Apr} } @book{improved operational testing and evaluation and methods of combining test information for the stryker family of vehicles and related army systems_2004, url={http://dx.doi.org/10.17226/10871}, DOI={10.17226/10871}, journal={National Academies Press}, year={2004}, month={Dec} } @inbook{integrated analysis of computational and physical experimental lifetime data_2004, url={http://dx.doi.org/10.1007/978-1-4419-9021-1}, DOI={10.1007/978-1-4419-9021-1}, abstractNote={In this volume consideration was given to more advanced theoretical approaches and novel applications of reliability to ensure that topics having a futuristic impact were specifically included.}, booktitle={Mathematical Reliability: An Expository Perspective}, year={2004} } @article{reese_wilson_hamada_martz_ryan_2004, title={Integrated Analysis of Computer and Physical Experiments}, url={http://dx.doi.org/10.1198/004017004000000211}, DOI={10.1198/004017004000000211}, abstractNote={Scientific investigations frequently involve data from computer experiment(s) as well as related physical experimental data on the same factors and related response variable(s). There may also be one or more expert opinions regarding the response of interest. Traditional statistical approaches consider each of these datasets separately with corresponding separate analyses and fitted statistical models. A compelling argument can be made that better, more precise statistical models can be obtained if the combined data are analyzed simultaneously using a hierarchical Bayesian integrated modeling approach. However, such an integrated approach must recognize important differences, such as possible biases, in these experiments and expert opinions. We illustrate our proposed integrated methodology by using it to model the thermodynamic operation point of a top-spray fluidized bed microencapsulation processing unit. Such units are used in the food industry to tune the effect of functional ingredients and additives. An important thermodynamic response variable of interest, Y, is the steady-state outlet air temperature. In addition to a set of physical experimental observations involving six factors used to predict Y, similar results from three different computer models are also available. The integrated data from the physical experiment and the three computer models are used to fit an appropriate response surface (regression) model for predicting Y.}, journal={Technometrics}, author={Reese, C.
Shane and Wilson, Alyson G. and Hamada, Michael and Martz, Harry F. and Ryan, Kenneth J.}, year={2004}, month={May} } @book{improved operational testing and evaluation_2003, url={http://dx.doi.org/10.17226/10710}, DOI={10.17226/10710}, journal={National Academies Press}, year={2003}, month={Jun} } @inbook{reliability for the 21st century_2003, DOI={10.1142/5248}, abstractNote={Reliability Theory in the Past and Present Centuries; General Aspects of Reliability Modelling; Reliability of Networks and Systems; Stochastic Modelling and Optimization in Reliability; Modelling in Survival and Reliability Analysis; Statistical Methods for Degradation Data; Statistical Methods for Maintained Systems; Statistical Inference in Survival Analysis; Software Reliability Methods.}, booktitle={Mathematical and Statistical Methods in Reliability}, year={2003} } @book{test design and evaluation for the interim armored vehicle: letter report_2002, url={http://dx.doi.org/10.17226/10529}, DOI={10.17226/10529}, journal={National Academies Press}, year={2002} } @article{hamada_martz_reese_wilson_2001, title={Finding near-optimal Bayesian experimental designs via genetic algorithms}, volume={55}, url={http://www.scopus.com/inward/record.url?eid=2-s2.0-0035620460&partnerID=MN8TOARS}, DOI={10.1198/000313001317098121}, abstractNote={This article shows how a genetic algorithm can be used to find near-optimal Bayesian experimental designs for regression models. The design criterion considered is the expected Shannon information gain of the posterior distribution obtained from performing a given experiment compared with the prior distribution. Genetic algorithms are described and then applied to experimental design. The methodology is then illustrated with a wide range of examples: linear and nonlinear regression, single and multiple factors, and normal and Bernoulli distributed experimental data.}, number={3}, journal={American Statistician}, author={Hamada, M. and Martz, H.F. and Reese, C.S. and Wilson, A.G.}, year={2001}, pages={175–181} } @misc{l_s_m_valen_g_1999, title={Image object matching using core analysis and deformable shape loci}, url={https://www.lens.org/161-652-391-335-889}, number={US 5926568 A}, author={Chaney, Edward L. and Fritsch, Daniel S. and Pizer, Stephen M. and Johnson, Valen and Wilson, Alyson G.}, year={1999}, month={Jul} } @inbook{wilson_johnson_1996, title={Models for Shape Deformation}, url={http://dx.doi.org/10.1093/oso/9780198523567.003.0061}, DOI={10.1093/oso/9780198523567.003.0061}, abstractNote={Much work in Bayesian image analysis has focused on incorporating vague prior knowledge about an image into its analysis and on the calculation of appropriate estimates of the resulting posterior distribution. However, in the field of medical imaging, there is a need to incorporate specific prior information into many image analysis tasks.
This paper discusses various models for shape deformation and proposes a model that accounts for features at multiple spatial resolutions.}, booktitle={Bayesian Statistics 5}, author={Wilson, A G and Johnson, V E}, year={1996}, month={May} } @article{knebel_janson-bjerklie_malley_wilson_marini_1994, title={Comparison of breathing comfort during weaning with two ventilatory modes.}, url={http://dx.doi.org/10.1164/ajrccm.149.1.8111572}, DOI={10.1164/ajrccm.149.1.8111572}, abstractNote={In twenty-one patients ventilated for > or = 3 days, we compared similar levels of partial support provided by synchronized intermittent mandatory ventilation (SIMV) and pressure support ventilation (PSV) in terms of breathing comfort. On a single day, eligible subjects experienced, in random order, both SIMV and PSV weaning protocols (sequential 20% reductions in support at timed intervals) separated by a 1 to 3 h rest. Breathing comfort was defined by subjective ratings of dyspnea and anxiety. Subjects reported significant levels of preweaning dyspnea and anxiety despite resting for at least 6 h. Dyspnea and anxiety were not significantly different between the two methods at any level of support. Our findings suggest that dyspnea and anxiety are higher than expected on "full" ventilator support, and that comfort may not differ between PSV and SIMV during active withdrawal of machine support.}, journal={American Journal of Respiratory and Critical Care Medicine}, author={Knebel, A R and Janson-Bjerklie, S L and Malley, J D and Wilson, A G and Marini, J J}, year={1994}, month={Jan} } @inproceedings{wilson_johnson_1994, title={Priors on scale-space templates}, url={http://dx.doi.org/10.1117/12.179247}, DOI={10.1117/12.179247}, abstractNote={Much of the Bayesian work in image analysis has focused on the incorporation of vague prior knowledge about the true image into the analysis and on the calculation of appropriate estimates of the resulting posterior distribution. However, in the field of medical imaging, there is a need to incorporate more specific prior information. This paper discusses various models for shape deformation and how they can be applied to the specification of priors on scale-space templates. A new model will be proposed that accounts for features at multiple spatial resolutions and the qualitative spatial relationships among those features.}, booktitle={SPIE Proceedings}, author={Wilson, Alyson G. and Johnson, Valen E.}, year={1994}, month={Jul} }