@article{hall_inoue_shin_2008, title={Entropy-based moment selection in the presence of weak identification}, volume={27}, ISSN={0747-4938}, DOI={10.1080/07474930801960261}, abstractNote={Hall et al. (2007) propose a method for moment selection based on an information criterion that is a function of the entropy of the limiting distribution of the Generalized Method of Moments (GMM) estimator. They establish the consistency of the method subject to certain conditions that include the identification of the parameter vector by at least one of the moment conditions being considered. In this article, we examine the limiting behavior of this moment selection method when the parameter vector is weakly identified by all the moment conditions being considered. It is shown that the selected moment condition is random and hence not consistent in any meaningful sense. As a result, we propose a two-step procedure for moment selection in which identification is first tested using a statistic proposed by Stock and Yogo (2003) and then only if this statistic indicates identification does the researcher proceed to the second step in which the aforementioned information criterion is used to select moments. The properties of this two-step procedure are contrasted with those of strategies based on either using all available moments or using the information criterion without the identification pre-test. The performances of these strategies are compared via an evaluation of the finite sample behavior of various methods for inference about the parameter vector. The inference methods considered are based on the Wald statistic, Anderson and Rubin's (1949) statistic, Kleibergen's (2002) K statistic, and combinations thereof in which the choice is based on the outcome of the test for weak identification.}, number={4-6}, journal={ECONOMETRIC REVIEWS}, author={Hall, Alastair R. and Inoue, Atsushi and Shin, Changmock}, year={2008}, pages={398–427} } @article{hall_inoue_2007, title={The large sample behaviour of the generalized method of moments estimator in misspecified models (vol 114, pg 361, 2003)}, volume={141}, ISSN={0304-4076}, DOI={10.1016/j.jeconom.2007.02.006}, number={2}, journal={JOURNAL OF ECONOMETRICS}, author={Hall, Alastair R. and Inoue, Atsushi}, year={2007}, month={Dec}, pages={1418–1418} } @article{jayaraman_hall_genzer_2006, title={Computer Simulation Study of Molecular Recognition in Model DNA Microarrays}, volume={91}, ISSN={0006-3495}, url={http://dx.doi.org/10.1529/biophysj.106.086173}, DOI={10.1529/biophysj.106.086173}, abstractNote={DNA microarrays have been widely adopted by the scientific community for a variety of applications. To improve the performance of microarrays there is a need for a fundamental understanding of the interplay between the various factors that affect microarray sensitivity and specificity. We use lattice Monte Carlo simulations to study the thermodynamics and kinetics of hybridization of single-stranded target genes in solution with complementary probe DNA molecules immobilized on a microarray surface. The target molecules in our system contain 48 segments and the probes tethered on a hard surface contain 8-24 segments. The segments on the probe and target are distinct and each segment represents a sequence of nucleotides (approximately 11 nucleotides). Each probe segment interacts exclusively with its unique complementary target segment with a single hybridization energy; all other interactions are zero.
We examine how the probe length, temperature (or hybridization energy), and the stretch along the target that the probe segments complement, affect the extent of hybridization. For systems containing single probe and single target molecules, we observe that as the probe length increases, the probability of binding all probe segments to the target decreases, implying that the specificity decreases. We observe that probes 12-16 segments (approximately 132-176 nucleotides) long gave the highest specificity and sensitivity. This agrees with the experimental results obtained by another research group, who found an optimal probe length of 150 nucleotides. As the hybridization energy increases, the longer probes are able to bind all their segments to the target, thus improving their specificity. The hybridization kinetics reveals that the segments at the ends of the probe are most likely to start the hybridization. The segments toward the center of the probe remain bound to the target for a longer time than the segments at the ends of the probe.}, number={6}, journal={Biophysical Journal}, publisher={Elsevier BV}, author={Jayaraman, Arthi and Hall, Carol K. and Genzer, Jan}, year={2006}, month={Sep}, pages={2227–2236} } @article{hall_inoue_jana_shin_2007, title={Information in generalized method of moments estimation and entropy-based moment selection}, volume={138}, ISSN={0304-4076}, DOI={10.1016/j.jeconom.2006.05.006}, abstractNote={In this paper, we make five contributions to the literature on information and entropy in generalized method of moments (GMM) estimation. First, we introduce the concept of the long run canonical correlations (LRCCs) between the true score vector and the moment function f(vt,θ0) and show that they provide a metric for the information contained in the population moment condition E[f(vt,θ0)]=0. Second, we show that the entropy of the limiting distribution of the GMM estimator can be written in terms of these LRCCs. Third, motivated by the above results, we introduce an information criterion based on this entropy that can be used as a basis for moment selection. Fourth, we introduce the concept of nearly redundant moment conditions and use it to explore the connection between redundancy and weak identification. Fifth, we analyse the behaviour of the aforementioned entropy-based moment selection method in two scenarios of interest; these scenarios are: (i) nonlinear dynamic models where the parameter vector is identified by all the combinations of moment conditions considered; (ii) linear static models where the parameter vector may be weakly identified for some of the combinations considered. The first of these contributions rests on a generalized information equality that is proved in the paper, and may be of interest in its own right.}, number={2}, journal={JOURNAL OF ECONOMETRICS}, author={Hall, Alastair R. and Inoue, Atsushi and Jana, Kalidas and Shin, Changmock}, year={2007}, month={Jun}, pages={488–512} } @article{peixe_hall_kyriakoulis_2006, title={The mean squared error of the instrumental variables estimator when the disturbance has an elliptical distribution}, volume={25}, ISSN={1532-4168}, DOI={10.1080/07474930500545488}, abstractNote={This paper generalizes Nagar's (1959) approximation to the finite sample mean squared error (MSE) of the instrumental variables (IV) estimator to the case in which the errors possess an elliptical distribution whose moments exist up to infinite order.
This allows for types of excess kurtosis exhibited by some financial data series. This approximation is compared numerically to Knight's (1985) formulae for the exact moments of the IV estimator under nonnormality. We use the results to explore two questions on instrument selection. First, we complement Buse's (1992) analysis by considering the impact of additional instruments on both bias and MSE. Second, we evaluate the properties of Andrews's (1999) selection method in terms of the bias and MSE of the resulting IV estimator.}, number={1}, journal={ECONOMETRIC REVIEWS}, author={Peixe, FPM and Hall, AR and Kyriakoulis, K}, year={2006}, pages={117–138} } @book{hall_2005, title={Generalized method of moments}, ISBN={0198775210}, publisher={Oxford; New York: Oxford University Press}, author={Hall, A. R.}, year={2005} } @article{hall_inoue_peixe_2003, title={Covariance matrix estimation and the limiting behavior of the overidentifying restrictions test in the presence of neglected structural instability}, volume={19}, ISSN={0266-4666}, DOI={10.1017/S0266466603196041}, abstractNote={We consider the limiting behavior of the overidentifying restrictions test in the presence of neglected structural instability at a single “break point.” It is shown that the test need not be consistent against this type of misspecification. If it is consistent then it emerges that the limiting behavior of this test statistic depends on the covariance matrix estimator employed. In this paper we consider the case in which a heteroskedasticity autocorrelation covariance (HAC) estimator is used. It is shown that (i) if the HAC estimator is based on uncentered autocovariances then the overidentifying restrictions test diverges at rate T/bT where T is the sample size and bT is the bandwidth; (ii) if the HAC estimator is based on centered autocovariances then the rate of increase of the overidentifying restrictions test is either T/bT or T depending on the form of the instability. These results are used to provide conditions for the consistency of the method of moment selection of Andrews (1999, Econometrica 67, 543–564) when certain elements of the candidate set of moments are misspecified as a result of neglected structural instability.}, number={6}, journal={ECONOMETRIC THEORY}, author={Hall, AR and Inoue, A and Peixe, FPM}, year={2003}, month={Dec}, pages={962–983} } @article{hall_inoue_2003, title={The large sample behaviour of the generalized method of moments estimator in misspecified models}, volume={114}, ISSN={0304-4076}, DOI={10.1016/S0304-4076(03)00089-7}, abstractNote={This paper presents the limiting distribution theory for the GMM estimator when the estimation is based on a population moment condition which is subject to non-local (or fixed) misspecification. It is shown that if the parameter vector is overidentified then the weighting matrix plays a far more fundamental role than it does in the corresponding analysis for correctly specified models. Specifically, the rate of convergence of the estimator depends on the rate of convergence of the weighting matrix to its probability limit.
The analysis is presented for four particular choices of weighting matrix which are commonly used in practice. In each case the limiting distribution theory is different, and also different from the limiting distribution in a correctly specified model. Statistics are proposed which allow the researcher to test hypotheses about the parameters in misspecified models.}, number={2}, journal={JOURNAL OF ECONOMETRICS}, author={Hall, AR and Inoue, A}, year={2003}, month={Jun}, pages={361–394} } @article{hall_2001, title={Testing target-zone models using efficient method of moments - Comment}, volume={19}, number={3}, journal={Journal of Business & Economic Statistics}, author={Hall, A. R.}, year={2001}, pages={269–271} } @article{hu_hall_nychka_2000, title={A nonparametric approach to stochastic discount factor estimation}, volume={14}, number={2000}, journal={Advances in Econometrics}, author={Hu, F. and Hall, A. R. and Nychka, D.}, year={2000}, pages={155–176} } @article{hall_2000, title={Covariance matrix estimation and the power of the overidentifying restrictions test}, volume={68}, ISSN={0012-9682}, DOI={10.1111/1468-0262.00171}, abstractNote={class of heteroscedasticity and autocorrelation consistent covariance (HACC) matrix estimators that are now routinely used by practitioners in the calculation of the two-step GMM estimator. In applications, it is customary to assume the model is correctly specified during the estimation, and only to assess the specification after the second step using a statistic such as the overidentifying restrictions test. One important consequence of this methodology is that the HACC estimator is calculated under the assumption that the model is correctly specified. In this paper, we examine the implications of model misspecification for the HACC covariance matrix estimator and the overidentifying restrictions test. It is shown that the HACC estimator is asymptotically equivalent to the sum of two matrices: one of these matrices is nonsingular and Op(1); the other is of rank one and Op(bT), where bT is the bandwidth used in the HACC estimator. It is shown that this structure implies the inverse of the HACC estimator converges to a singular matrix as bT → ∞ with T, and this limiting matrix has rank q-1 where q is the dimension of the moment condition. It is shown that this limiting behavior translates into an overidentifying restrictions test that is Op(T/bT). In contrast, it is shown that the overidentifying restrictions test is consistent and Op(T) if the covariance matrix estimator is consistent under both null and alternative. This can be achieved by constructing the HACC estimator using the sample moment in mean deviation form.}, number={6}, journal={ECONOMETRICA}, author={Hall, AR}, year={2000}, month={Nov}, pages={1517–1527} } @article{fleissig_hall_seater_2000, title={GARP, separability, and the representative agent}, volume={4}, ISSN={1365-1005}, DOI={10.1017/S1365100500016035}, abstractNote={We examine whether annual, quarterly, and monthly U.S. aggregate consumption data could have been generated by a utility-maximizing representative agent with intertemporally separable utility. The model appears inapplicable over the full time periods covered by the NIPA data, which are the sample periods often used in the literature. The model does appear applicable, however, over long subsamples. The data also are inconsistent with separability assumptions routinely made in the literature.
In particular, the main categories of consumption (nondurables, services, and durables) are not mutually separable. We consider the implications of our results for inference about consumption based on the representative-agent model.}, number={3}, journal={MACROECONOMIC DYNAMICS}, author={Fleissig, AR and Hall, AR and Seater, JJ}, year={2000}, month={Sep}, pages={324–342} } @article{ghysels_guay_hall_1999, title={Predictive tests for structural change with unknown breakpoint (vol 82, pg 209, 1997)}, volume={90}, number={2}, journal={Journal of Econometrics}, author={Ghysels, E. and Guay, A. and Hall, A.}, year={1999}, pages={337–343} } @article{hall_1999, title={Structural stability testing in models estimated by generalized method of moments}, volume={17}, ISSN={0735-0015}, DOI={10.2307/1392291}, number={3}, journal={JOURNAL OF BUSINESS & ECONOMIC STATISTICS}, author={Hall, AR}, year={1999}, month={Jul}, pages={335–348} } @article{ghysels_guay_hall_1998, title={Predictive tests for structural change with unknown breakpoint}, volume={82}, ISSN={0304-4076}, DOI={10.1016/S0304-4076(97)00057-2}, abstractNote={This paper considers predictive tests for structural change in models estimated via Generalized Method of Moments. Our analysis extends earlier work by Ghysels and Hall (1990a) by allowing for the instability to occur at an unknown point in the sample. We analyse various statistics based on continuous mappings of the sequence of predictive tests calculated for a set of possible breakpoints in the sample. The limiting distribution of these statistics is derived under both the null hypothesis and local alternatives. Percentiles are reported for the distribution under the null. A side-product of our analysis is that we can illuminate the power properties of the predictive test and also compare its properties to those of the Wald, LR and LM tests for parameter variation. We study those power properties both via local asymptotic analysis and Monte Carlo.}, number={2}, journal={JOURNAL OF ECONOMETRICS}, author={Ghysels, E and Guay, A and Hall, A}, year={1998}, month={Feb}, pages={209–233} } @misc{hall_1997, title={Periodicity and stochastic trends in economic time series, by P. H. Franses}, volume={107}, number={444}, journal={Economic Journal}, author={Hall, A. R.}, year={1997}, pages={1602–1603} }