@comment{Cleaned auto-exported entries: removed the illegal space from the first
  citation key, stripped JSON residue from ISSN, fixed en-dash page ranges to --,
  and retyped conference papers as @inproceedings with booktitle.}

@article{dominguez_goodwin_roberts_st_amant_2017,
  title        = {Human Subtlety Proofs: Using Computer Games to Model Cognitive Processes for Cybersecurity},
  volume       = {33},
  number       = {1},
  issn         = {1532-7590},
  doi          = {10.1080/10447318.2016.1232229},
  abstractNote = {ABSTRACT This article describes an emerging direction in the intersection between human–computer interaction and cognitive science: the use of cognitive models to give insight into the challenges of cybersecurity (cyber-SA). The article gives a brief overview of work in different areas of cyber-SA where cognitive modeling research plays a role, with regard to direct interaction between end users and computer systems and with regard to the needs of security analysts working behind the scenes. The problem of distinguishing between human users and automated agents (bots) interacting with computer systems is introduced, as well as ongoing efforts toward building Human Subtlety Proofs (HSPs), persistent and unobtrusive windows into human cognition with direct application to cyber-SA. Two computer games are described, proxies to illustrate different ways in which cognitive modeling can potentially contribute to the development of HSPs and similar cyber-SA applications.},
  journal      = {International Journal of Human-Computer Interaction},
  author       = {Dominguez, Ignacio X. and Goodwin, Prairie Rose and Roberts, David L. and St Amant, Robert},
  year         = {2017},
  pages        = {44--54},
}

@inproceedings{mealin_dominguez_roberts_2016,
  title        = {Semi-supervised Classification of Static Canine Postures Using the {Microsoft Kinect}},
  doi          = {10.1145/2995257.3012024},
  abstractNote = {3D sensing hardware, such as the Microsoft Kinect, allows new interaction paradigms that would be difficult to accomplish with traditional RGB cameras alone. One basic step in realizing these new methods of animal-computer interaction is posture and behavior detection and classification. In this paper, we present a system capable of identifying static postures for canines that does not rely on hand-labeled data at any point during the process. We create a model of the canine based on measurements automatically obtained in from the first few captured frames, reducing the burden on users. We also present a preliminary evaluation of the system with five dogs, which shows that the system can identify the "standing," "sitting," and "lying" postures with approximately 70%, 69%, and 94% accuracy, respectively.},
  booktitle    = {Proceedings of the Third International Conference on Animal-Computer Interaction ({ACI} 2016)},
  author       = {Mealin, Sean and Dominguez, Ignacio X. and Roberts, David L.},
  year         = {2016},
}

@inproceedings{dominguez_cardona-rivera_vance_roberts_2016,
  title        = {The {Mimesis Effect}: The Effect of Roles on Player Choice in Interactive Narrative Role-Playing Games},
  doi          = {10.1145/2858036.2858141},
  abstractNote = {We present a study that investigates the heretofore unexplored relationship between a player's sense of her narrative role in an interactive narrative role-playing game and the options she selects when faced with choice structures during gameplay. By manipulating a player's knowledge over her role, and examining in-game options she preferred in choice structures, we discovered what we term the Mimesis Effect: when players were explicitly given a role, we found a significant relationship between their role and their in-game actions; participants role-play even if not instructed to, exhibiting a preference for actions consistent with their role. Further, when players were not explicitly given a role, participants still role-played -- they were consistent with an implicit role -- but did not agree on which role to implicitly be consistent with. We discuss our findings and broader implications of our work to both game development and games research.},
  booktitle    = {34th Annual {CHI} Conference on Human Factors in Computing Systems ({CHI} 2016)},
  author       = {Dominguez, Ignacio X. and Cardona-Rivera, Rogelio E. and Vance, James K. and Roberts, David L.},
  year         = {2016},
  pages        = {3438--3449},
}

@inproceedings{mealin_winters_dominguez_marrero-garcia_bozkurt_sherman_roberts_2015,
  title        = {Towards the Non-Visual Monitoring of Canine Physiology in Real-Time by Blind Handlers},
  doi          = {10.1145/2832932.2837018},
  abstractNote = {One of the challenges to working with canines is that whereas humans are primarily vocal communicators, canines are primarily postural and behavioral communicators. It can take years to gain some level of proficiency at reading canine body language, even under the best of circumstances. In the case of guide dogs and visually-impaired handlers, this task is even more difficult. Luckily, new technology designed to help monitor canines may prove useful in helping handlers, especially those with visual impairments, to better understand and interpret what their working partners are feeling or saying. In prior work a light-weight, wearable, wireless physiological monitoring system was shown to be accurate for measuring canines' heart and respiratory rates [6]. In this paper, we consider the complementary problem of communicating physiological information to handlers. We introduce two non-visual interfaces for monitoring a canine's heart and respiratory rates, an audio interface and a vibrotactile interface. We also present the results of two initial studies to evaluate the efficacy of the interfaces. In the first study we found that many participants were more confident in detecting changes in heart and respiratory rate using the audio interface, however most of the time they were just as accurate with the vibrotactile interface with only a slight increase in detection latency.},
  booktitle    = {12th Advances in Computer Entertainment Technology Conference ({ACE15})},
  author       = {Mealin, Sean and Winters, Mike and Dominguez, Ignacio X. and Marrero-Garcia, Michelle and Bozkurt, Alper and Sherman, Barbara L. and Roberts, David L.},
  year         = {2015},
}