@article{wright_meade_2012,
  title        = {An exploration of cognitive ability contamination in the Implicit Association Test methodology},
  author       = {Wright, Natalie A. and Meade, Adam W.},
  journal      = {Computers in Human Behavior},
  volume       = {28},
  number       = {2},
  pages        = {393--399},
  year         = {2012},
  month        = mar,
  issn         = {1873-7692},
  doi          = {10.1016/j.chb.2011.10.009},
  abstract     = {The purpose of this study was to explore the relationship between scores on the Implicit Association Test (IAT) and cognitive ability. This relationship was investigated by examining the relationship between two different IATs, a cognitive ability test, and learning outcomes following a short training module. Results demonstrated that IATs scored with the D scoring algorithm were not significantly related to cognitive ability test scores and were not related to post-training learning outcomes. However, IATs scored with the conventional scoring algorithm were significantly negatively related to cognitive ability, and the two IATs used in the study were significantly correlated with one another regardless of which scoring method was used.}
}

@article{meade_wright_2012,
  title        = {Solving the Measurement Invariance Anchor Item Problem in Item Response Theory},
  author       = {Meade, Adam W. and Wright, Natalie A.},
  journal      = {Journal of Applied Psychology},
  volume       = {97},
  number       = {5},
  pages        = {1016--1031},
  year         = {2012},
  month        = sep,
  issn         = {0021-9010},
  doi          = {10.1037/a0027934},
  abstract     = {The efficacy of tests of differential item functioning (measurement invariance) has been well established. It is clear that when properly implemented, these tests can successfully identify differentially functioning (DF) items when they exist. However, an assumption of these analyses is that the metric for different groups is linked using anchor items that are invariant. In practice, however, it is impossible to be certain which items are DF and which are invariant. This problem of anchor items, or referent indicators, has long plagued invariance research, and a multitude of suggested approaches have been put forth. Unfortunately, the relative efficacy of these approaches has not been tested. This study compares 11 variations on 5 qualitatively different approaches from recent literature for selecting optimal anchor items. A large-scale simulation study indicates that for nearly all conditions, an easily implemented 2-stage procedure recently put forth by Lopez Rivas, Stark, and Chernyshenko (2009) provided optimal power while maintaining nominal Type I error. With this approach, appropriate anchor items can be easily and quickly located, resulting in more efficacious invariance tests. Recommendations for invariance testing are illustrated using a pedagogical example of employee responses to an organizational culture measure.}
}