@article{behrend_toaddy_thompson_sharek_2012,
  title        = {The Effects of Avatar Appearance on Interviewer Ratings in Virtual Employment Interviews},
  volume       = {28},
  ISSN         = {0747-5632},
  DOI          = {10.1016/j.chb.2012.06.017},
  abstractNote = {It is increasingly common for people engaging in computer–mediated interactions to be accompanied by a digital avatar that represents them. Little is known, however, about how these avatars influence others’ impressions. We examine this question in the context of employment interviews. It is well known that attractive job candidates are afforded an advantage in traditional face-to-face job interviews. We investigate whether raters evaluating computer–mediated interviews will follow a similar pattern when a digital avatar represents the candidate. To investigate this question, we asked 374 raters to view an interview transcript that was accompanied by either a male or female avatar, applying for either a male or female gender-typed job. We found that candidates with more attractive avatars received more favorable interview ratings, regardless of job gender type. These findings support the notion that the “what is beautiful is good” stereotype influences interview ratings even in computer-mediated interviews; raters automatically apply the same heuristics to digital and non-digital faces.},
  number       = {6},
  journal      = {Computers in Human Behavior},
  author       = {Behrend, Tara and Toaddy, Steven and Thompson, Lori Foster and Sharek, David J.},
  year         = {2012},
  month        = nov,
  pages        = {2128--2133},
}

@article{behrend_sharek_meade_wiebe_2011,
  title        = {The Viability of Crowdsourcing for Survey Research},
  volume       = {43},
  ISSN         = {1554-3528},
  url          = {http://dx.doi.org/10.3758/s13428-011-0081-0},
  DOI          = {10.3758/s13428-011-0081-0},
  abstractNote = {Online contract labor portals (i.e., crowdsourcing) have recently emerged as attractive alternatives to university participant pools for the purposes of collecting survey data for behavioral research. However, prior research has not provided a thorough examination of crowdsourced data for organizational psychology research. We found that, as compared with a traditional university participant pool, crowdsourcing respondents were older, were more ethnically diverse, and had more work experience. Additionally, the reliability of the data from the crowdsourcing sample was as good as or better than the corresponding university sample. Moreover, measurement invariance generally held across these groups. We conclude that the use of these labor portals is an efficient and appropriate alternative to a university participant pool, despite small differences in personality and socially desirable responding across the samples. The risks and advantages of crowdsourcing are outlined, and an overview of practical and ethical guidelines is provided.},
  number       = {3},
  journal      = {Behavior Research Methods},
  publisher    = {Springer Science and Business Media LLC},
  author       = {Behrend, Tara S. and Sharek, David J. and Meade, Adam W. and Wiebe, Eric N.},
  year         = {2011},
  month        = mar,
  pages        = {800--813},
}