@article{zhou_lahiri_2023,
  author   = {Zhou, Weilian and Lahiri, Soumendra},
  title    = {Stationary Jackknife},
  journal  = {Journal of Time Series Analysis},
  year     = {2023},
  month    = aug,
  doi      = {10.1111/jtsa.12714},
  issn     = {1467-9892},
  abstract = {Variance estimation is an important aspect in statistical inference, especially in the dependent data situations. Resampling methods are ideal for solving this problem since these do not require restrictive distributional assumptions. In this paper, we develop a novel resampling method in the Jackknife family called the stationary jackknife. It can be used to estimate the variance of a statistic in the cases where observations are from a general stationary sequence. Unlike the moving block jackknife, the stationary jackknife computes the jackknife replication by deleting a variable length block and the length has a truncated geometric distribution. Under appropriate assumptions, we can show the stationary jackknife variance estimator is a consistent estimator for the case of the sample mean and, more generally, for a class of nonlinear statistics. Further, the stationary jackknife is shown to provide reasonable variance estimation for a wider range of expected block lengths when compared with the moving block jackknife by simulation.},
}
@article{bennett_martin_lahiri_2022,
  author  = {Bennett, Iris and Martin, Donald E. K. and Lahiri, Soumendra Nath},
  title   = {Fitting sparse {Markov} models through a collapsed {Gibbs} sampler},
  journal = {Computational Statistics},
  year    = {2022},
  month   = dec,
  doi     = {10.1007/s00180-022-01310-8},
  issn    = {1613-9658},
}
@article{chakraborty_lahiri_wilson_2020,
  author   = {Chakraborty, Arnab and Lahiri, Soumendra Nath and Wilson, Alyson},
  title    = {A Statistical Analysis of Noisy Crowdsourced Weather Data},
  journal  = {Annals of Applied Statistics},
  volume   = {14},
  number   = {1},
  pages    = {116--142},
  year     = {2020},
  month    = mar,
  doi      = {10.1214/19-AOAS1290},
  issn     = {1932-6157},
  abstract = {Spatial prediction of weather-elements like temperature, precipitation, and barometric pressure are generally based on satellite imagery or data collected at ground-stations. None of these data provide information at a more granular or "hyper-local" resolution. On the other hand, crowdsourced weather data, which are captured by sensors installed on mobile devices and gathered by weather-related mobile apps like WeatherSignal and AccuWeather, can serve as potential data sources for analyzing environmental processes at a hyper-local resolution. However, due to the low quality of the sensors and the non-laboratory environment, the quality of the observations in crowdsourced data is compromised. This paper describes methods to improve hyper-local spatial prediction using this varying-quality noisy crowdsourced information. We introduce a reliability metric, namely Veracity Score (VS), to assess the quality of the crowdsourced observations using a coarser, but high-quality, reference data. A VS-based methodology to analyze noisy spatial data is proposed and evaluated through extensive simulations. The merits of the proposed approach are illustrated through case studies analyzing crowdsourced daily average ambient temperature readings for one day in the contiguous United States.},
}
@article{van hala_bandyopadhyay_lahiri_nordman_2020,
  author   = {Van Hala, Matthew and Bandyopadhyay, Soutir and Lahiri, Soumendra N. and Nordman, Daniel J.},
  title    = {A general frequency domain method for assessing spatial covariance structures},
  journal  = {Bernoulli},
  volume   = {26},
  number   = {4},
  pages    = {2463--2487},
  year     = {2020},
  month    = nov,
  doi      = {10.3150/19-BEJ1160},
  issn     = {1573-9759},
  abstract = {When examining dependence in spatial data, it can be helpful to formally assess spatial covariance structures that may not be parametrically specified or fully model-based. That is, one may wish to test for general features regarding spatial covariance without presupposing any particular, or potentially restrictive, assumptions about the joint data distribution. Current methods for testing spatial covariance are often intended for specialized inference scenarios, usually with spatial lattice data. We propose instead a general method for estimation and testing of spatial covariance structure, which is valid for a variety of inference problems (including nonparametric hypotheses) and applies to a large class of spatial sampling designs with irregular data locations. In this setting, spatial statistics have limiting distributions with complex standard errors depending on the intensity of spatial sampling, the distribution of sampling locations, and the process dependence. The proposed method has the advantage of providing valid inference in the frequency domain without estimation of such standard errors, which are often intractable, and without particular distributional assumptions about the data (e.g., Gaussianity). To illustrate, we develop the method for formally testing isotropy and separability in spatial covariance and consider confidence regions for spatial parameters in variogram model fitting. A broad result is also presented to justify the method for application to other potential problems and general scenarios with testing spatial covariance. The approach uses spatial test statistics, based on an extended version of empirical likelihood, having simple chi-square limits for calibrating tests. We demonstrate the proposed method through several numerical studies.},
}
@article{lee_lahiri_sinha_2020,
  author   = {Lee, DongHyuk and Lahiri, Soumendra N. and Sinha, Samiran},
  title    = {A test of homogeneity of distributions when observations are subject to measurement errors},
  journal  = {Biometrics},
  volume   = {76},
  number   = {3},
  pages    = {821--833},
  year     = {2020},
  month    = sep,
  doi      = {10.1111/biom.13207},
  issn     = {1541-0420},
  abstract = {When the observed data are contaminated with errors, the standard two-sample testing approaches that ignore measurement errors may produce misleading results, including a higher type-I error rate than the nominal level. To tackle this inconsistency, a nonparametric test is proposed for testing equality of two distributions when the observed contaminated data follow the classical additive measurement error model. The proposed test takes into account the presence of errors in the observed data, and the test statistic is defined in terms of the (deconvoluted) characteristic functions of the latent variables. Proposed method is applicable to a wide range of scenarios as no parametric restrictions are imposed either on the distribution of the underlying latent variables or on the distribution of the measurement errors. Asymptotic null distribution of the test statistic is derived, which is given by an integral of a squared Gaussian process with a complicated covariance structure. For data-based calibration of the test, a new nonparametric Bootstrap method is developed under the two-sample measurement error framework and its validity is established. Finite sample performance of the proposed test is investigated through simulation studies, and the results show superior performance of the proposed method than the standard tests that exhibit inconsistent behavior. Finally, the proposed method was applied to real data sets from the National Health and Nutrition Examination Survey. An R package MEtest is available through CRAN.},
}
@article{giordano_lahiri_parrella_2020,
  author   = {Giordano, Francesco and Lahiri, Soumendra Nath and Parrella, Maria Lucia},
  title    = {{GRID}: A Variable Selection and Structure Discovery Method for High Dimensional Nonparametric Regression},
  journal  = {Annals of Statistics},
  volume   = {48},
  number   = {3},
  pages    = {1848--1874},
  year     = {2020},
  month    = jun,
  doi      = {10.1214/19-AOS1846},
  issn     = {0090-5364},
  abstract = {We consider nonparametric regression in high dimensions where only a relatively small subset of a large number of variables are relevant and may have nonlinear effects on the response. We develop methods for variable selection, structure discovery and estimation of the true low-dimensional regression function, allowing any degree of interactions among the relevant variables that need not be specified a-priori. The proposed method, called the GRID, combines empirical likelihood based marginal testing with the local linear estimation machinery in a novel way to select the relevant variables. Further, it provides a simple graphical tool for identifying the low dimensional nonlinear structure of the regression function. Theoretical results establish consistency of variable selection and structure discovery, and also Oracle risk property of the GRID estimator of the regression function, allowing the dimension d of the covariates to grow with the sample size n at the rate d = O(n) for any $a \in (0,\infty)$ and the number of relevant covariates r to grow at a rate r = O(n) for some $\gamma \in (0, 1)$ under some regularity conditions that, in particular, require finiteness of certain absolute moments of the error variables depending on a. Finite sample properties of the GRID are investigated in a moderately large simulation study.},
}
@article{bugni_caner_kock_lahiri_2020,
  author   = {Bugni, Federico A. and Caner, Mehmet and Kock, Anders Bredahl and Lahiri, Soumendra},
  title    = {Inference in partially identified models with many moment inequalities using {Lasso}},
  journal  = {Journal of Statistical Planning and Inference},
  volume   = {206},
  pages    = {211--248},
  year     = {2020},
  month    = may,
  doi      = {10.1016/j.jspi.2019.09.013},
  issn     = {1873-1171},
  abstract = {This paper considers inference in a partially identified moment (in)equality model with many moment inequalities. We propose a novel two-step inference procedure that combines the methods proposed by Chernozhukov et al. (2018a) (Chernozhukov et al., 2018a, hereafter) with a first step moment inequality selection based on the Lasso. Our method controls asymptotic size uniformly, both in the underlying parameter and the data distribution. Also, the power of our method compares favorably with that of the corresponding two-step method in Chernozhukov et al. (2018a) for large parts of the parameter space, both in theory and in simulations. Finally, we show that our Lasso-based first step can be implemented by thresholding standardized sample averages, and so it is straightforward to implement.},
}
@article{das_lahiri_2019,
  author   = {Das, Debraj and Lahiri, S. N.},
  title    = {Distributional consistency of the lasso by perturbation bootstrap},
  journal  = {Biometrika},
  volume   = {106},
  number   = {4},
  pages    = {957--964},
  year     = {2019},
  month    = dec,
  doi      = {10.1093/biomet/asz029},
  issn     = {1464-3510},
  abstract = {The lasso is a popular estimation procedure in multiple linear regression. We develop and establish the validity of a perturbation bootstrap method for approximating the distribution of the lasso estimator in a heteroscedastic linear regression model. We allow the underlying covariates to be either random or nonrandom, and show that the proposed bootstrap method works irrespective of the nature of the covariates. We also investigate finite-sample properties of the proposed bootstrap method in a moderately large simulation study.},
}
@article{lahiri_das_nordman_2019,
  author   = {Lahiri, Soumendra N. and Das, Ujjwal and Nordman, Daniel J.},
  title    = {Empirical Likelihood for a Long Range Dependent Process Subordinated to a {Gaussian} Process},
  journal  = {Journal of Time Series Analysis},
  volume   = {40},
  number   = {4},
  pages    = {447--466},
  year     = {2019},
  month    = jul,
  doi      = {10.1111/jtsa.12465},
  issn     = {1467-9892},
  abstract = {This article develops empirical likelihood methodology for a class of long range dependent processes driven by a stationary Gaussian process. We consider population parameters that are defined by estimating equations in the time domain. It is shown that the standard block empirical likelihood (BEL) method, with a suitable scaling, has a non-standard limit distribution based on a multiple Wiener--It{\^o} integral. Unlike the short memory time series case, the scaling constant involves unknown population quantities that may be difficult to estimate. Alternative versions of the empirical likelihood method, involving the expansive BEL (EBEL) methods are considered. It is shown that the EBEL renditions do not require an explicit scaling and, therefore, remove this undesirable feature of the standard BEL. However, the limit law involves the long memory parameter, which may be estimated from the data. Results from a moderately large simulation study on finite sample properties of tests and confidence intervals based on different empirical likelihood methods are also reported.},
}
@article{das_gregory_lahiri_2019,
  author   = {Das, Debraj and Gregory, Karl and Lahiri, S. N.},
  title    = {Perturbation Bootstrap in Adaptive {Lasso}},
  journal  = {Annals of Statistics},
  volume   = {47},
  number   = {4},
  pages    = {2080--2116},
  year     = {2019},
  month    = aug,
  doi      = {10.1214/18-AOS1741},
  issn     = {0090-5364},
  abstract = {The Adaptive Lasso(Alasso) was proposed by Zou [\textit{J. Amer. Statist. Assoc. \textbf{101} (2006) 1418-1429}] as a modification of the Lasso for the purpose of simultaneous variable selection and estimation of the parameters in a linear regression model. Zou (2006) established that the Alasso estimator is variable-selection consistent as well as asymptotically Normal in the indices corresponding to the nonzero regression coefficients in certain fixed-dimensional settings. In an influential paper, Minnier, Tian and Cai [\textit{J. Amer. Statist. Assoc. \textbf{106} (2011) 1371-1382}] proposed a perturbation bootstrap method and established its distributional consistency for the Alasso estimator in the fixed-dimensional setting. In this paper, however, we show that this (naive) perturbation bootstrap fails to achieve second order correctness in approximating the distribution of the Alasso estimator. We propose a modification to the perturbation bootstrap objective function and show that a suitably studentized version of our modified perturbation bootstrap Alasso estimator achieves second-order correctness even when the dimension of the model is allowed to grow to infinity with the sample size. As a consequence, inferences based on the modified perturbation bootstrap will be more accurate than the inferences based on the oracle Normal approximation. We give simulation studies demonstrating good finite-sample properties of our modified perturbation bootstrap method as well as an illustration of our method on a real data set.},
}
@article{shockley_gupta_harris_lahiri_peddada_2019,
  author   = {Shockley, Keith R. and Gupta, Shuva and Harris, Shawn F. and Lahiri, Soumendra N. and Peddada, Shyamal D.},
  title    = {Quality Control of Quantitative High Throughput Screening Data},
  journal  = {Frontiers in Genetics},
  volume   = {10},
  year     = {2019},
  month    = may,
  doi      = {10.3389/fgene.2019.00387},
  issn     = {1664-8021},
  abstract = {Quantitative high throughput screening (qHTS) experiments can generate 1000s of concentration-response profiles to screen compounds for potentially adverse effects. However, potency estimates for a single compound can vary considerably in study designs incorporating multiple concentration-response profiles for each compound. We introduce an automated quality control procedure based on analysis of variance (ANOVA) to identify and filter out compounds with multiple cluster response patterns and improve potency estimation in qHTS assays. Our approach, called Cluster Analysis by Subgroups using ANOVA (CASANOVA), clusters compound-specific response patterns into statistically supported subgroups. Applying CASANOVA to 43 publicly available qHTS data sets, we found that only about 20\% of compounds with response values outside of the noise band have single cluster responses. The error rates for incorrectly separating true clusters and incorrectly clumping disparate clusters were both less than 5\% in extensive simulation studies. Simulation studies also showed that the bias and variance of concentration at half-maximal response (AC50) estimates were usually within 10-fold when using a weighted average approach for potency estimation. In short, CASANOVA effectively sorts out compounds with ``inconsistent'' response patterns and produces trustworthy AC50 values.},
}
@article{das_lahiri_2019a,
  author   = {Das, Debraj and Lahiri, S. N.},
  title    = {Second order correctness of perturbation bootstrap {M-estimator} of multiple linear regression parameter},
  journal  = {Bernoulli},
  volume   = {25},
  number   = {1},
  pages    = {654--682},
  year     = {2019},
  month    = feb,
  doi      = {10.3150/17-BEJ1001},
  issn     = {1573-9759},
  abstract = {Consider the multiple linear regression model $y_{i} = \boldsymbol{x}'_{i} \boldsymbol{\beta} + \epsilon_{i}$, where $\epsilon_i$'s are independent and identically distributed random variables, $\mathbf{x}_i$'s are known design vectors and $\boldsymbol{\beta}$ is the $p \times 1$ vector of parameters. An effective way of approximating the distribution of the M-estimator $\boldsymbol{\bar{\beta}}_n$, after proper centering and scaling, is the Perturbation Bootstrap Method. In this current work, second order results of this non-naive bootstrap method have been investigated. Second order correctness is important for reducing the approximation error uniformly to $o(n^{-1/2})$ to get better inferences. We show that the classical studentized version of the bootstrapped estimator fails to be second order correct. We introduce an innovative modification in the studentized version of the bootstrapped statistic and show that the modified bootstrapped pivot is second order correct (S.O.C.) for approximating the distribution of the studentized M-estimator. Additionally, we show that the Perturbation Bootstrap continues to be S.O.C. when the errors $\epsilon_i$'s are independent, but may not be identically distributed. These findings establish perturbation Bootstrap approximation as a significant improvement over asymptotic normality in the regression M-estimation.},
}
@article{gregory_lahiri_nordman_2018,
  author   = {Gregory, Karl B. and Lahiri, Soumendra N. and Nordman, Daniel J.},
  title    = {A Smooth Block Bootstrap for Quantile Regression with Time Series},
  journal  = {Annals of Statistics},
  volume   = {46},
  number   = {3},
  pages    = {1138--1166},
  year     = {2018},
  month    = jun,
  doi      = {10.1214/17-aos1580},
  issn     = {0090-5364},
  abstract = {Quantile regression allows for broad (conditional) characterizations of a response distribution beyond conditional means and is of increasing interest in economic and financial applications. Because quantile regression estimators have complex limiting distributions, several bootstrap methods for the independent data setting have been proposed, many of which involve smoothing steps to improve bootstrap approximations. Currently, no similar advances in smoothed bootstraps exist for quantile regression with dependent data. To this end, we establish a smooth tapered block bootstrap procedure for approximating the distribution of quantile regression estimators for time series. This bootstrap involves two rounds of smoothing in resampling: individual observations are resampled via kernel smoothing techniques and resampled data blocks are smoothed by tapering. The smooth bootstrap results in performance improvements over previous unsmoothed versions of the block bootstrap as well as normal approximations based on Powell's kernel variance estimator, which are common in application. Our theoretical results correct errors in proofs for earlier and simpler versions of the (unsmoothed) moving blocks bootstrap for quantile regression and broaden the validity of block bootstraps for this problem under weak conditions. We illustrate the smooth bootstrap through numerical studies and examples. . 05 and 0 . 05 for the sizes n = 50, 100 and 200. This demonstrates the potential usefulness of the SETBB method in QR-based forecasting problems in addition to quantile parameter estimation.},
}
@article{chatterjee_lahiri_2018,
  author  = {Chatterjee, Arindam and Lahiri, Soumendra N.},
  title   = {{Edgeworth} Expansions for a Class of Spectral Density Estimators and Their Applications to Interval Estimation},
  journal = {Statistica Sinica},
  volume  = {28},
  number  = {4},
  pages   = {2591--2608},
  year    = {2018},
  month   = oct,
  doi     = {10.5705/ss.202017.0121},
  issn    = {1996-8507},
}
@article{lahiri_politis_robinson_2018,
  author  = {Lahiri, S. N. and Politis, D. N. and Robinson, P. M.},
  title   = {{Emanuel Parzen} memorial},
  journal = {Journal of Time Series Analysis},
  volume  = {39},
  number  = {3},
  pages   = {241--241},
  year    = {2018},
}
@article{kim_lahiri_nordman_2018,
  author   = {Kim, Young Min and Lahiri, Soumendra N. and Nordman, Daniel J.},
  title    = {Non-Parametric Spectral Density Estimation Under Long-Range Dependence},
  journal  = {Journal of Time Series Analysis},
  volume   = {39},
  number   = {3},
  pages    = {380--401},
  year     = {2018},
  month    = may,
  doi      = {10.1111/jtsa.12284},
  issn     = {1467-9892},
  abstract = {One major aim of time series analysis, particularly in the physical and geo-sciences, is the estimation of the spectral density function. With weakly dependent time processes, non-parametric, kernel-based methods are available for spectral density estimation, which involves smoothing the periodogram by a kernel function. However, a similar non-parametric approach is presently unavailable for strongly, or long-range, dependent processes. In particular, as the spectral density function under long-range dependence commonly has a pole at the origin, kernel-based methods developed for weakly dependent processes (i.e., with bounded spectral densities) do not apply readily for long-range dependence without suitable modification. To address this, we propose a non-parametric kernel-based method for spectral density estimation, which is valid under both weak and strong dependence. Based on the initial or pilot estimator of the long-memory parameter, the method involves a frequency domain transformation to dampen the dependence in periodogram ordinates and mimic kernel-based estimation under weak dependence. Under mild assumptions, the proposed non-parametric spectral density estimator is shown to be uniformly consistent, and general expressions are provided for rates of estimation error and optimal kernel bandwidths. The method is investigated through simulation and illustrated through data examples, which also consider bandwidth selection.},
}
@article{lahiri_2018,
  author  = {Lahiri, S. N.},
  title   = {Uncertainty Quantification in Robust Inference for Irregularly Spaced Spatial Data Using Block Bootstrap},
  journal = {Sankhya Series A: Mathematical Statistics and Probability},
  volume  = {80},
  pages   = {173--221},
  year    = {2018},
  month   = dec,
  doi     = {10.1007/s13171-018-0154-6},
  issn    = {0976-8378},
}
@article{van hala_bandyopadhyay_lahiri_nordman_2017,
  author   = {Van Hala, Matthew and Bandyopadhyay, Soutir and Lahiri, Soumendra N. and Nordman, Daniel J.},
  title    = {On the non-standard distribution of empirical likelihood estimators with spatial data},
  journal  = {Journal of Statistical Planning and Inference},
  volume   = {187},
  pages    = {109--114},
  year     = {2017},
  month    = aug,
  doi      = {10.1016/j.jspi.2017.02.007},
  issn     = {1873-1171},
  abstract = {This note highlights some unusual and unexpected behavior in point estimation using empirical likelihood (EL). In particular, frequency domain formulations of EL, based on the periodogram and estimating functions, have been proposed in the literature for time and spatial processes. However, in contrast to the time series case and most applications of EL, the maximum EL parameter estimator exhibits surprisingly non-standard asymptotic properties for irregularly located spatial data. In fact, a consistent normal limit cannot be guaranteed, as is typical for EL. Despite this, log-ratio EL statistics maintain standard chi-square limits with such spatial data.},
}
@inproceedings{cleghern_lahiri_ozaltin_roberts_2017,
  author    = {Cleghern, Z. and Lahiri, S. and Ozaltin, O. and Roberts, D. L.},
  title     = {Predicting Future States in {Dota} 2 Using Value-Split Models of Time Series Attribute Data},
  booktitle = {Proceedings of the 12th International Conference on the Foundations of Digital Games (FDG'17)},
  year      = {2017},
}
@article{nordman_lahiri_2016,
  author  = {Nordman, Daniel J. and Lahiri, Soumendra N.},
  title   = {A discussion of {Bootstrap} prediction intervals for linear, nonlinear, and nonparametric autoregressions by {L. Pan} and {D. N. Politis}},
  journal = {Journal of Statistical Planning and Inference},
  volume  = {177},
  pages   = {35--40},
  year    = {2016},
  month   = oct,
  doi     = {10.1016/j.jspi.2015.10.014},
  issn    = {1873-1171},
}
@article{lahiri_robinson_2016,
  author   = {Lahiri, S. N. and Robinson, Peter M.},
  title    = {Central limit theorems for long range dependent spatial linear processes},
  journal  = {Bernoulli},
  volume   = {22},
  number   = {1},
  pages    = {345--375},
  year     = {2016},
  month    = feb,
  doi      = {10.3150/14-bej661},
  issn     = {1573-9759},
  abstract = {Central limit theorems are established for the sum, over a spatial region, of observations from a linear process on a d-dimensional lattice. This region need not be rectangular, but can be irregularly-shaped. Separate results are established for the cases of positive strong dependence, short range dependence, and negative dependence. We provide approximations to asymptotic variances that reveal differential rates of convergence under the three types of dependence. Further, in contrast to the one dimensional (i.e., the time series) case, it is shown that the form of the asymptotic variance in dimensions d > 1 critically depends on the geometry of the sampling region under positive strong dependence and under negative dependence and that there can be non-trivial edge-effects under negative dependence for d > 1. Precise conditions for the presence of edge effects are also given.},
}
@article{bandypadhyay_lahiri_nordman_2015,
  author   = {Bandyopadhyay, Soutir and Lahiri, Soumendra N. and Nordman, Daniel J.},
  title    = {A Frequency Domain Empirical Likelihood Method for Irregularly Spaced Spatial Data},
  journal  = {Annals of Statistics},
  volume   = {43},
  number   = {2},
  pages    = {519--545},
  year     = {2015},
  month    = apr,
  doi      = {10.1214/14-aos1291},
  issn     = {0090-5364},
  abstract = {This paper develops empirical likelihood methodology for irregularly spaced spatial data in the frequency domain. Unlike the frequency domain empirical likelihood (FDEL) methodology for time series (on a regular grid), the formulation of the spatial FDEL needs special care due to lack of the usual orthogonality properties of the discrete Fourier transform for irregularly spaced data and due to presence of nontrivial bias in the periodogram under different spatial asymptotic structures. A spatial FDEL is formulated in the paper taking into account the effects of these factors. The main results of the paper show that Wilks' phenomenon holds for a scaled version of the logarithm of the proposed empirical likelihood ratio statistic in the sense that it is asymptotically distribution-free and has a chi-squared limit. As a result, the proposed spatial FDEL method can be used to build nonparametric, asymptotically correct confidence regions and tests for covariance parameters that are defined through spectral estimating equations, for irregularly spaced spatial data. In comparison to the more common studentization approach, a major advantage of our method is that it does not require explicit estimation of the standard error of an estimator, which is itself a very difficult problem as the asymptotic variances of many common estimators depend on intricate interactions among several population quantities, including the spectral density of the spatial process, the spatial sampling density and the spatial asymptotic structure. Results from a numerical study are also reported to illustrate the methodology and its finite sample properties.},
}
@article{gregory_lahiri_nordman_2015,
  author   = {Gregory, Karl B. and Lahiri, Soumendra N. and Nordman, Daniel J.},
  title    = {A Smooth Block Bootstrap for Statistical Functionals and Time Series},
  journal  = {Journal of Time Series Analysis},
  volume   = {36},
  number   = {3},
  pages    = {442--461},
  year     = {2015},
  month    = may,
  doi      = {10.1111/jtsa.12117},
  issn     = {1467-9892},
  abstract = {Unlike with independent data, smoothed bootstraps have received little consideration for time series, although data smoothing within resampling can improve bootstrap approximations, especially when target distributions depend on smooth population quantities (e.g., marginal densities). For approximating a broad class statistics formulated through statistical functionals (e.g., L-estimators, and sample quantiles), we propose a smooth bootstrap by modifying a state-of-the-art (extended) tapered block bootstrap (TBB). Our treatment shows that the smooth TBB applies to time series inference cases not formally established with other TBB versions. Simulations also indicate that smoothing enhances the block bootstrap.},
}
@article{gregory_carroll_baladandayuthapani_lahiri_2015,
  author   = {Gregory, Karl Bruce and Carroll, Raymond J. and Baladandayuthapani, Veerabhadran and Lahiri, Soumendra N.},
  title    = {A Two-Sample Test for Equality of Means in High Dimension},
  journal  = {Journal of the American Statistical Association},
  volume   = {110},
  number   = {510},
  pages    = {837--849},
  year     = {2015},
  month    = jun,
  doi      = {10.1080/01621459.2014.934826},
  issn     = {1537-274X},
  abstract = {We develop a test statistic for testing the equality of two population mean vectors in the ``large-p-small-n'' setting. Such a test must surmount the rank-deficiency of the sample covariance matrix, which breaks down the classic Hotelling T2 test. The proposed procedure, called the generalized component test, avoids full estimation of the covariance matrix by assuming that the p components admit a logical ordering such that the dependence between components is related to their displacement. The test is shown to be competitive with other recently developed methods under ARMA and long-range dependence structures and to achieve superior power for heavy-tailed data. The test does not assume equality of covariance matrices between the two populations, is robust to heteroscedasticity in the component variances, and requires very little computation time, which allows its use in settings with very large p. An analysis of mitochondrial calcium concentration in mouse cardiac muscles over time and of copy number variations in a glioblastoma multiforme dataset from The Cancer Genome Atlas are carried out to illustrate the test. Supplementary materials for this article are available online.},
}
@article{chatterjee_lahiri_2015,
  author    = {Chatterjee, A. and Lahiri, S. N.},
  title     = {Comment},
  journal   = {Journal of the American Statistical Association},
  publisher = {Informa UK Limited},
  volume    = {110},
  number    = {512},
  pages     = {1434--1438},
  year      = {2015},
  month     = oct,
  doi       = {10.1080/01621459.2015.1102143},
  url       = {http://dx.doi.org/10.1080/01621459.2015.1102143},
  issn      = {0162-1459, 1537-274X},
  abstract  = {This is an interesting article dealing with the important issue of consistency of bootstrap approximations for distributions of nonregular estimators under local asymptotics. Our discussion of the article (referred to as [MQ] in the following to save space) will focus on two aspects: (1) the use of bootstrap on a nonregular test statistic and (2) an alternative solution to the present testing problem where the issue of nonregularity can be bypassed, allowing the naive bootstrap to be used without any modification. Since the nonregular behavior of the test statistic is present only in a neighborhood of $\beta_0 = 0$ in model (2) of [MQ], we shall set $\beta_0 = 0$ and restrict attention to the local asymptotic structure:},
}
@article{chatterjee_gupta_lahiri_2015,
  author   = {Chatterjee, A. and Gupta, S. and Lahiri, S. N.},
  title    = {On the residual empirical process based on the {ALASSO} in high dimensions and its functional oracle property},
  journal  = {Journal of Econometrics},
  volume   = {186},
  number   = {2},
  pages    = {317--324},
  year     = {2015},
  month    = jun,
  doi      = {10.1016/j.jeconom.2015.02.012},
  issn     = {1872-6895},
  abstract = {This paper considers post variable-selection inference in a high dimensional penalized regression model based on the ALASSO method of Zou (2006). It is shown that under suitable sparsity conditions, the residual empirical process based on the ALASSO provides valid inference methodology in very high dimensional regression problems where conventional methods fail. It is also shown that the ALASSO based residual empirical process satisfies a functional oracle property, i.e., in addition to selecting the set of relevant variables with probability tending to one, the ALASSO based residual empirical process converges to the same limiting Gaussian process as the OLS based residual empirical process under the oracle. The functional oracle property is critically exploited to construct asymptotically valid confidence bands for the error distribution function and prediction intervals for unobserved values of the response variable in the high dimensional set up, where traditional non-penalized methods are known to fail. Simulation results are presented illustrating finite sample performance of the proposed methodology.},
}
@article{staicu_lahiri_carroll_2015, title={Significance tests for functional data with complex dependence structure}, volume={156}, ISSN={["1873-1171"]}, DOI={10.1016/j.jspi.2014.08.006}, abstractNote={We propose an L2-norm based global testing procedure for the null hypothesis that multiple group mean functions are equal, for functional data with complex dependence structure. Specifically, we consider the setting of functional data with a multilevel structure of the form groups-clusters or subjects-units, where the unit-level profiles are spatially correlated within the cluster, and the cluster-level data are independent. Orthogonal series expansions are used to approximate the group mean functions and the test statistic is estimated using the basis coefficients. The asymptotic null distribution of the test statistic is developed, under mild regularity conditions. To our knowledge this is the first work that studies hypothesis testing, when data have such complex multilevel functional and spatial structure. Two small-sample alternatives, including a novel block bootstrap for functional data, are proposed, and their performance is examined in simulation studies. The paper concludes with an illustration of a motivating experiment.}, journal={JOURNAL OF STATISTICAL PLANNING AND INFERENCE}, author={Staicu, Ana-Maria and Lahiri, Soumendra N. and Carroll, Raymond J.}, year={2015}, month={Jan}, pages={1–13} }
@misc{nordman_lahiri_2014, title={A review of empirical likelihood methods for time series}, volume={155}, ISSN={["1873-1171"]}, DOI={10.1016/j.jspi.2013.10.001}, abstractNote={We summarize advances in empirical likelihood (EL) for time series data. The EL formulation for independent data is briefly presented, which can apply for inference in special time series problems, reproducing the Wilks phenomenon of chi-square limits for log-ratio statistics. For more general inference with time series, versions of time domain block-based EL, and its generalizations based on divergence measures, are described along with their distributional properties; some approaches are intended for mixing time processes and others are tailored to time series with a Markovian structure. We also present frequency domain EL methods based on the periodogram. Finally, EL for long-range dependent processes is reviewed as well as recent advantages in EL for high dimensional problems. Some illustrative numerical examples are given along with a summary of open research issues for EL with dependent data.}, journal={JOURNAL OF STATISTICAL PLANNING AND INFERENCE}, author={Nordman, Daniel J. and Lahiri, Soumendra N.}, year={2014}, month={Dec}, pages={1–18} }
@article{gupta_lahiri_2014, title={Comment}, volume={109}, ISSN={["1537-274X"]}, DOI={10.1080/01621459.2014.905789}, abstractNote={which is again the LS estimator. Minimizing LS(b) with b > 1 gives a more robust way of doing LS in which the effect of potential outliers is diminished by the local averaging of b neighboring values; details are omitted due to lack of space. Similarly to the above, minimizing L1(1) is equivalent to L1 regression, whereas minimizing L1(b) with b > 1 gives additional robustness. Finally, let us revisit the general case of model (1) with μp(xj ) = β0 + x jβp. When p > 1, the regressors xj cannot be sorted in ascending order. One could instead use a localaveraging or nearest-neighbor technique to compute the subsample means. But no such trick is needed in the most interesting case of b = 1 since the quantities LS(1) and L1(1) are unequivocally defined as}, number={507}, journal={JOURNAL OF THE AMERICAN STATISTICAL ASSOCIATION}, author={Gupta, Shuva and Lahiri, S. N.}, year={2014}, month={Jul}, pages={1013–1015} }
@book{contemporary_developments_in_statistical_theory_2014, title={Contemporary Developments in Statistical Theory: A Festschrift for Hira Lal Koul}, editor={Lahiri, Soumendra and Schick, Anton and SenGupta, Ashis and Sriram, T. N.}, place={Cham}, publisher={Springer}, year={2014} }
@article{lahiri_xie_2014, title={Preface to the Professor Kesar Singh Memorial Issue}, volume={20}, ISSN={1572-3127}, url={http://dx.doi.org/10.1016/J.STAMET.2014.04.002}, DOI={10.1016/J.STAMET.2014.04.002}, journal={Statistical Methodology}, publisher={Elsevier BV}, author={Lahiri, Soumendra and Xie, Min-ge}, year={2014}, month={Sep}, pages={1} }
@inbook{lahiri_schick_sengupta_sriram_2014, place={Cham, Switzerland}, series={Springer Proceedings in Mathematics \& Statistics}, title={Professor Hira Lal Koul’s Contribution to Statistics}, ISBN={9783319026503 9783319026510}, ISSN={2194-1009 2194-1017}, url={http://dx.doi.org/10.1007/978-3-319-02651-0_1}, DOI={10.1007/978-3-319-02651-0_1}, abstractNote={Professor Hira Koul received his Ph.D. in Statistics from the University of California, Berkeley in 1967 under the supervision of Professor Peter Bickel. He has the unique distinction of being the first doctoral student of Professor Bickel. True to his training at Berkeley, in the initial years of his research career, he focused on developing asymptotic theory of statistical inference. He pioneered the approach of Asymptotic Uniform Linearity (AUL) as a theoretical tool for studying properties of the empirical process based on residuals from a semiparametric model. This approach has been widely employed by several authors in studying the asymptotic properties of tests of composite hyptheses, and has been a particularly powerful tool for deriving limit laws of goodness-of-fit tests. At around the same time, he also developed the theory of weighted empirical processes which played a fundamental role in the study of asymptotic distribution of robust estimators (e.g., Rank-based estimators and M-estimators) in linear regression models. An elegant account of the theory of weighted empirical processes for independent as well as dependent random variables is given in his monographs on the topic}, booktitle={Contemporary Developments in Statistical Theory}, publisher={Springer International Publishing}, author={Lahiri, Soumendra and Schick, Anton and SenGupta, Ashis and Sriram, T. N.}, editor={Lahiri, S. and Schick, A. and SenGupta, A. and Sriram, T. N.}, year={2014}, pages={1–7}, collection={Springer Proceedings in Mathematics \& Statistics} }
@article{dasgupta_lahiri_stoyanov_2014, title={Sharp fixed n bounds and asymptotic expansions for the mean and the median of a Gaussian sample maximum, and applications to the Donoho-Jin model}, volume={20}, ISSN={["1878-0954"]}, DOI={10.1016/j.stamet.2014.01.002}, abstractNote={We are interested in the sample maximum X(n) of an i.i.d standard normal sample of size n. First, we derive two-sided bounds on the mean and the median of X(n) that are valid for any fixed n≥n0, where n0 is ‘small’, e.g. n0=7. These fixed n bounds are established by using new very sharp bounds on the standard normal quantile function Φ−1(1−p). The bounds found in this paper are currently the best available explicit nonasymptotic bounds, and are of the correct asymptotic order up to the number of terms involved. Then we establish exact three term asymptotic expansions for the mean and the median of X(n). This is achieved by reducing the extreme value problem to a problem about sample means. This technique is general and should apply to suitable other distributions. One of our main conclusions is that the popular approximation E[X(n)]≈2logn should be discontinued, unless n is fantastically large. Better approximations are suggested in this article. An application of some of our results to the Donoho–Jin sparse signal recovery model is made. The standard Cauchy case is touched on at the very end.}, journal={STATISTICAL METHODOLOGY}, author={DasGupta, Anirban and Lahiri, S. N. and Stoyanov, Jordan}, year={2014}, month={Sep}, pages={40–62} }
@book{akritas_lahiri_politis_2014, title={Topics in nonparametric statistics: Proceedings of the first Conference of the International Society for Nonparametric Statistics}, editor={Akritas, Michael G. and Lahiri, S. N. and Politis, Dimitris N.}, place={Berlin}, publisher={Springer}, year={2014} }
@article{nordman_bunzel_lahiri_2013, title={A NONSTANDARD EMPIRICAL LIKELIHOOD FOR TIME SERIES}, volume={41}, ISSN={["0090-5364"]}, DOI={10.1214/13-aos1174}, abstractNote={Standard blockwise empirical likelihood (BEL) for stationary, weakly dependent time series requires specifying a fixed block length as a tuning parameter for setting confidence regions. This aspect can be difficult and impacts coverage accuracy. As an alternative, this paper proposes a new version of BEL based on a simple, though nonstandard, data-blocking rule which uses a data block of every possible length. Consequently, the method does not involve the usual block selection issues and is also anticipated to exhibit better coverage performance. Its nonstandard blocking scheme, however, induces nonstandard asymptotics and requires a significantly different development compared to standard BEL. We establish the large-sample distribution of log-ratio statistics from the new BEL method for calibrating confidence regions for mean or smooth function parameters of time series. This limit law is not the usual chi-square one, but is distribution-free and can be reproduced through straightforward simulations. Numerical studies indicate that the proposed method generally exhibits better coverage accuracy than standard BEL.}, number={6}, journal={ANNALS OF STATISTICS}, author={Nordman, Daniel J. and Bunzel, Helle and Lahiri, Soumendra N.}, year={2013}, month={Dec}, pages={3050–3073} }
@article{kim_lahiri_nordman_2013, title={A Progressive Block Empirical Likelihood Method for Time Series}, volume={108}, ISSN={["1537-274X"]}, DOI={10.1080/01621459.2013.847374}, abstractNote={This article develops a new blockwise empirical likelihood (BEL) method for stationary, weakly dependent time processes, called the progressive block empirical likelihood (PBEL). In contrast to the standard version of BEL, which uses data blocks of constant length for a given sample size and whose performance can depend crucially on the block length selection, this new approach involves a data-blocking scheme where blocks increase in length by an arithmetic progression. Consequently, no block length selections are required for the PBEL method, which implies a certain type of robustness for this version of BEL. For inference of smooth functions of the process mean, theoretical results establish the chi-squared limit of the log-likelihood ratio based on PBEL, which can be used to calibrate confidence regions. Using the same progressive block scheme, distributional extensions are also provided for other nonparametric likelihoods with time series in the family of Cressie–Read discrepancies. Simulation evidence indicates that the PBEL method can perform comparably to the standard BEL in coverage accuracy (when the latter uses a “good” block choice) and can exhibit more stability, without the need to select a usual block length. Supplementary materials for this article are available online.}, number={504}, journal={JOURNAL OF THE AMERICAN STATISTICAL ASSOCIATION}, author={Kim, Young Min and Lahiri, Soumendra N. and Nordman, Daniel J.}, year={2013}, month={Dec}, pages={1506–1516} }
@article{chatterjee_lahiri_2013, title={Rates of convergence of the adaptive LASSO estimators to the Oracle distribution and higher order refinements by the bootstrap}, volume={41}, DOI={10.1214/13-aos1106}, abstractNote={Zou [J. Amer. Statist. Assoc. 101 (2006) 1418-1429] proposed the Adaptive LASSO (ALASSO) method for simultaneous variable selection and estimation of the regression parameters, and established its oracle property. In this paper, we investigate the rate of convergence of the ALASSO estimator to the oracle distribution when the dimension of the regression parameters may grow to infinity with the sample size. It is shown that the rate critically depends on the choices of the penalty parameter and the initial estimator, among other factors, and that confidence intervals (CIs) based on the oracle limit law often have poor coverage accuracy. As an alternative, we consider the residual bootstrap method for the ALASSO estimators that has been recently shown to be consistent; cf. Chatterjee and Lahiri [J. Amer. Statist. Assoc. 106 (2011a) 608-625]. We show that the bootstrap applied to a suitable studentized version of the ALASSO estimator achieves second-order correctness, even when the dimension of the regression parameters is unbounded. Results from a moderately large simulation study show marked improvement in coverage accuracy for the bootstrap CIs over the oracle based CIs.}, number={3}, journal={Annals of Statistics}, author={Chatterjee, A. and Lahiri, S. N.}, year={2013}, pages={1232–1259} }
@article{nordman_lahiri_2012, title={Block Bootstraps for Time Series With Fixed Regressors}, volume={107}, ISSN={0162-1459 1537-274X}, url={http://dx.doi.org/10.1080/01621459.2011.646929}, DOI={10.1080/01621459.2011.646929}, abstractNote={This article examines block bootstrap methods in linear regression models with weakly dependent error variables and nonstochastic regressors. Contrary to intuition, the tapered block bootstrap (TBB) with a smooth taper not only loses its superior bias properties but may also fail to be consistent in the regression problem. A similar problem, albeit at a smaller scale, is shown to exist for the moving and the circular block bootstrap (MBB and CBB, respectively). As a remedy, an additional block randomization step is introduced that balances out the effects of nonuniform regression weights, and restores the superiority of the (modified) TBB. The randomization step also improves the MBB or CBB. Interestingly, the stationary bootstrap (SB) automatically balances out regression weights through its probabilistic blocking mechanism, without requiring any modification, and enjoys a kind of robustness. Optimal block sizes are explicitly determined for block bootstrap variance estimators under regression. Finite sample performance and practical uses of the methods are illustrated through a simulation study and two data examples, respectively. Supplementary materials are available online.}, number={497}, journal={Journal of the American Statistical Association}, publisher={Informa UK Limited}, author={Nordman, Daniel J. and Lahiri, Soumendra N.}, year={2012}, month={Mar}, pages={233–246} }
@article{maitra_melnykov_lahiri_2012, title={Bootstrapping for Significance of Compact Clusters in Multidimensional Datasets}, volume={107}, ISSN={0162-1459 1537-274X}, url={http://dx.doi.org/10.1080/01621459.2011.646935}, DOI={10.1080/01621459.2011.646935}, abstractNote={This article proposes a bootstrap approach for assessing significance in the clustering of multidimensional datasets. The procedure compares two models and declares the more complicated model a better candidate if there is significant evidence in its favor. The performance of the procedure is illustrated on two well-known classification datasets and comprehensively evaluated in terms of its ability to estimate the number of components via extensive simulation studies, with excellent results. The methodology is also applied to the problem of k-means color quantization of several standard images in the literature and is demonstrated to be a viable approach for determining the minimal and optimal numbers of colors needed to display an image without significant loss in resolution. Additional illustrations and performance evaluations are provided in the online supplementary material.}, number={497}, journal={Journal of the American Statistical Association}, publisher={Informa UK Limited}, author={Maitra, Ranjan and Melnykov, Volodymyr and Lahiri, Soumendra N.}, year={2012}, month={Mar}, pages={378–392} }
@inproceedings{mukhopadhyay_parzen_lahiri_2012, place={New York}, title={From data to constraints}, volume={1443}, url={http://dx.doi.org/10.1063/1.3703617}, DOI={10.1063/1.3703617}, abstractNote={Jaynes' Maximum Entropy (MaxEnt) inference starts with the assumption that we have a set of known constraints over the distribution. In statistical physics, we have a good intuition about the conserved macroscopic variables. It should not be surprising that in a real world applications, we have no idea about which coordinates to use for specifying the state of the system. In other words, we only observe empirical data and we have to take a decision on the constraints from the data. In an effort to circumvent this limitation, we propose a nonparametric quantile based method to extract relevant and significant facts (sufficient statistics) for the maximum entropy exponential model.}, number={1}, booktitle={AIP Conference Proceedings}, publisher={AIP}, author={Mukhopadhyay, S. and Parzen, E. and Lahiri, S. N.}, year={2012}, pages={32–39} }
@article{nordman_lahiri_2011, title={Bias expansion of spatial statistics and approximation of differenced lattice point counts}, volume={121}, ISSN={0253-4142 0973-7685}, url={http://dx.doi.org/10.1007/s12044-011-0024-9}, DOI={10.1007/s12044-011-0024-9}, number={2}, journal={Proceedings - Mathematical Sciences}, publisher={Springer Science and Business Media LLC}, author={Nordman, Daniel J and Lahiri, Soumendra N}, year={2011}, month={May}, pages={229–244} }
@article{lahiri_mukhopadhyay_2011, title={Comments on: Subsampling weakly dependent time series and application to extremes}, volume={20}, ISSN={1133-0686 1863-8260}, url={http://dx.doi.org/10.1007/S11749-011-0273-Z}, DOI={10.1007/S11749-011-0273-Z}, number={3}, journal={TEST}, publisher={Springer Science and Business Media LLC}, author={Lahiri, S. N. and Mukhopadhyay, S.}, year={2011}, month={Nov}, pages={491–496} }
@article{chatterjee_lahiri_2011, title={Strong consistency of Lasso estimators}, volume={73}, ISSN={0976-836X 0976-8378}, url={http://dx.doi.org/10.1007/S13171-011-0006-0}, DOI={10.1007/S13171-011-0006-0}, number={1}, journal={Sankhya A}, publisher={Springer Science and Business Media LLC}, author={Chatterjee, A. and Lahiri, S. N.}, year={2011}, month={Feb}, pages={55–78} }
@article{bandyopadhyay_lahiri_2010, title={Resampling-based bias-corrected time series prediction}, volume={140}, ISSN={0378-3758}, url={http://dx.doi.org/10.1016/j.jspi.2010.04.042}, DOI={10.1016/j.jspi.2010.04.042}, abstractNote={In this paper, we consider estimation of the mean squared prediction error (MSPE) of the best linear predictor of (possibly) nonlinear functions of finitely many future observations in a stationary time series. We develop a resampling methodology for estimating the MSPE when the unknown parameters in the best linear predictor are estimated. Further, we propose a bias corrected MSPE estimator based on the bootstrap and establish its second order accuracy. Finite sample properties of the method are investigated through a simulation study.}, number={12}, journal={Journal of Statistical Planning and Inference}, publisher={Elsevier BV}, author={Bandyopadhyay, S. and Lahiri, S.N.}, year={2010}, month={Dec}, pages={3775–3788} }
@article{lahiri_furukawa_lee_2007, title={A nonparametric plug-in rule for selecting optimal block lengths for block bootstrap methods}, volume={4}, ISSN={1572-3127}, url={http://dx.doi.org/10.1016/j.stamet.2006.08.002}, DOI={10.1016/j.stamet.2006.08.002}, abstractNote={In this paper, we consider the problem of empirical choice of optimal block sizes for block bootstrap estimation of population parameters. We suggest a nonparametric plug-in principle that can be used for estimating ‘mean squared error’-optimal smoothing parameters in general curve estimation problems, and establish its validity for estimating optimal block sizes in various block bootstrap estimation problems. A key feature of the proposed plug-in rule is that it can be applied without explicit analytical expressions for the constants that appear in the leading terms of the optimal block lengths. Furthermore, we also discuss the computational efficacy of the method and explore its finite sample properties through a simulation study.}, number={3}, journal={Statistical Methodology}, publisher={Elsevier BV}, author={Lahiri, S.N. and Furukawa, K. and Lee, Y.-D.}, year={2007}, month={Jul}, pages={292–321} }
@article{zhu_lahiri_2007, title={Bootstrapping the Empirical Distribution Function of a Spatial Process}, volume={10}, ISSN={1387-0874 1572-9311}, url={http://dx.doi.org/10.1007/s11203-005-2349-4}, DOI={10.1007/s11203-005-2349-4}, number={2}, journal={Statistical Inference for Stochastic Processes}, publisher={Springer Science and Business Media LLC}, author={Zhu, Jun and Lahiri, S. N.}, year={2007}, month={Jul}, pages={107–145} }
@article{lahiri_chatterjee_maiti_2007, title={Normal approximation to the hypergeometric distribution in nonstandard cases and a sub-Gaussian Berry–Esseen theorem}, volume={137}, ISSN={0378-3758}, url={http://dx.doi.org/10.1016/j.jspi.2007.03.033}, DOI={10.1016/j.jspi.2007.03.033}, abstractNote={In this paper, we consider simple random sampling without replacement from a dichotomous finite population. We investigate accuracy of the Normal approximation to the Hypergeometric probabilities for a wide range of parameter values, including the nonstandard cases where the sampling fraction tends to one and where the proportion of the objects of interest in the population tends to the boundary values, zero and one. We establish a non-uniform Berry–Esseen theorem for the Hypergeometric distribution which shows that in the nonstandard cases, the rate of Normal approximation to the Hypergeometric distribution can be considerably slower than the rate of Normal approximation to the Binomial distribution. We also report results from a moderately large numerical study and provide some guidelines for using the Normal approximation to the Hypergeometric distribution in finite samples.}, number={11}, journal={Journal of Statistical Planning and Inference}, publisher={Elsevier BV}, author={Lahiri, S.N. and Chatterjee, A. and Maiti, T.}, year={2007}, month={Nov}, pages={3570–3590} }
@article{nordman_lahiri_2005, title={Validity of the Sampling Window Method for Long-range dependent linear processes}, volume={21}, ISSN={0266-4666 1469-4360}, url={http://dx.doi.org/10.1017/S0266466605050541}, DOI={10.1017/S0266466605050541}, abstractNote={The sampling window method of Hall, Jing, and Lahiri (1998, Statistica Sinica 8, 1189–1204) is known to consistently estimate the distribution of the sample mean for a class of long-range dependent processes, generated by transformations of Gaussian time series. This paper shows that the same nonparametric subsampling method is also valid for an entirely different category of long-range dependent series that are linear with possibly non-Gaussian innovations. For these strongly dependent time processes, subsampling confidence intervals allow inference on the process mean without knowledge of the underlying innovation distribution or the long-memory parameter. The finite-sample coverage accuracy of the subsampling method is examined through a numerical study.The authors thank two referees for comments and suggestions that greatly improved an earlier draft of the paper. This research was partially supported by U.S. National Science Foundation grants DMS 00-72571 and DMS 03-06574 and by the Deutsche Forschungsgemeinschaft (SFB 475).}, number={06}, journal={Econometric Theory}, publisher={Cambridge University Press (CUP)}, author={Nordman, Daniel J. and Lahiri, Soumendra N.}, year={2005}, month={Sep} }
@article{lahiri_mukherjee_2004, title={Asymptotic distributions of M-estimators in a spatial regression model under some fixed and stochastic spatial sampling designs}, volume={56}, ISSN={0020-3157 1572-9052}, url={http://dx.doi.org/10.1007/bf02530543}, DOI={10.1007/bf02530543}, number={2}, journal={Annals of the Institute of Statistical Mathematics}, publisher={Springer Science and Business Media LLC}, author={Lahiri, S. N. and Mukherjee, Kanchan}, year={2004}, month={Jun}, pages={225–250} }
@article{karabulut_lahiri_1997, title={Two-term Edgeworth expansion for M-estimators of a linear regression parameter without Cramér-type conditions and an application to the bootstrap}, volume={62}, ISSN={0263-6115}, url={http://dx.doi.org/10.1017/S1446788700001063}, DOI={10.1017/S1446788700001063}, abstractNote={Abstract A two-term Edgeworth expansion for the distribution of an M-estimator of a simple linear regression parameter is obtained without assuming any Cramér-type conditions. As an application, it is shown that certain modification of the naive bootstrap procedure is second order correct even when the error variables have a lattice distribution. This is in marked contrast with the results of Singh on the sample mean of independent and identically distributed random variables.}, number={3}, journal={Journal of the Australian Mathematical Society. Series A. Pure Mathematics and Statistics}, publisher={Cambridge University Press (CUP)}, author={Karabulut, I. and Lahiri, S. N.}, year={1997}, month={Jun}, pages={361–370} }