@article{foster_brugarolas_walker_mealin_cleghern_yuschak_clark_adin_russenberger_gruen_et_al_2020, title={Preliminary Evaluation of a Wearable Sensor System for Heart Rate Assessment in Guide Dog Puppies}, volume={20}, ISSN={["1558-1748"]}, url={https://doi.org/10.1109/JSEN.2020.2986159}, DOI={10.1109/JSEN.2020.2986159}, abstractNote={This paper details the development of a novel wireless heart rate sensing system for puppies in training as guide dogs. The system includes a harness with an on-board electrocardiography (ECG) front-end circuit, an inertial measurement unit, and a microcomputer with wireless capability; the major research focus of this paper was the ergonomic design and evaluation of the system on puppies. The first phase of our evaluation was performed on a Labrador Retriever between 12 and 26 weeks of age as a pilot study. The longitudinal weekly data collected revealed the expected trend of decreasing average heart rate and increasing heart rate variability with age. In the second phase, we improved the system ergonomics for a larger-scale deployment in a guide dog school (Guiding Eyes for the Blind (Guiding Eyes)) on seventy 7.5-week-old puppies (heart rate coverage average of 86.7%). The acquired ECG-based heart rate data was used to predict the performance of puppies in Guiding Eyes' temperament test. We used the data as an input to a machine learning model which predicted two Behavior Checklist (BCL) scores, as determined by expert Guiding Eyes puppy evaluators, with an accuracy above 90%.}, number={16}, journal={IEEE SENSORS JOURNAL}, publisher={Institute of Electrical and Electronics Engineers (IEEE)}, author={Foster, Marc and Brugarolas, Rita and Walker, Katherine and Mealin, Sean and Cleghern, Zach and Yuschak, Sherrie and Clark, Julia Condit and Adin, Darcy and Russenberger, Jane and Gruen, Margaret and et al.}, year={2020}, pages={9449–9459} } @article{stefik_ladner_allee_mealin_2019, title={Computer Science Principles for Teachers of Blind and Visually Impaired Students}, DOI={10.1145/3287324.3287453}, abstractNote={The College Board's AP Computer Science Principles (CSP) content has become a major new course for introducing K-12 students to the discipline. The course was designed for many reasons, but one major goal was to broaden participation. While significant work has been completed toward equity by many research groups, we know of no systematic analysis of CSP content created by major vendors in relation to accessibility for students with disabilities, especially those who are blind or visually impaired. In this experience report, we discuss two major actions by our team to make CSP more accessible. First, with the help of accessibility experts and teachers, we modified the entire Code.org CSP course to make it accessible. Second, we conducted a one-week professional development workshop in the summer of 2018 for teachers of blind or visually impaired students in order to help them prepare to teach CSP or support those who do. We report here on lessons learned that are useful to teachers who have blind or visually impaired students in their classes, to AP CSP curriculum providers, and to the College Board.}, journal={SIGCSE '19: PROCEEDINGS OF THE 50TH ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION}, author={Stefik, Andreas and Ladner, Richard E.
and Allee, William and Mealin, Sean}, year={2019}, pages={766–772} } @article{majikes_brugarolas_winters_yuschak_mealin_walker_yang_sherman_bozkurt_roberts_2017, title={Balancing noise sensitivity, response latency, and posture accuracy for a computer-assisted canine posture training system}, volume={98}, ISSN={["1095-9300"]}, DOI={10.1016/j.ijhcs.2016.04.010}, abstractNote={This paper describes a canine posture detection system composed of wearable sensors and instrumented devices that detect the postures sit, stand, and eat. The system consists of a customized harness outfitted with wearable Inertial Measurement Units (IMUs) and a base station for processing IMU data to classify canine postures. Research in operant conditioning, the science of behavior change, indicates that successful animal training requires consistent and accurate feedback on behavior. Properly designed computer systems excel at timeliness and accuracy, which are two characteristics most amateur trainers struggle with and professionals strive for. Therefore, in addition to the system being ergonomically designed to ensure the dog's comfort and well-being, it is engineered to provide posture detection with timing and accuracy on par with a professional trainer. We contend that providing a system with these characteristics will one day aid dogs in learning from humans by overcoming poor or ineffective timing during training. We present the initial steps in the development and validation of a computer-assisted training system designed to work outside of laboratory environments. The main contributions of this work are (a) to explore the trade-off between responding with low latency to changes in time-series IMU data representative of posture changes and maintaining accuracy and timing similar to a professional trainer, and (b) to provide a model for future ACI technologies by documenting the user-centered approach we followed to create a computer-assisted training system that met the criteria identified in (a). Accordingly, in addition to describing our system, we present the results of three experiments to characterize the performance of the system at capturing sit postures of dogs and providing timely reinforcement. These trade-offs are illustrated through the comparison of two algorithms. The first is Random Forest classification and the second is an algorithm which uses a Variance-based Threshold for classification of postures. Results indicate that with proper parameter tuning, our system can successfully capture and reinforce postures to provide computer-assisted training of dogs.}, journal={INTERNATIONAL JOURNAL OF HUMAN-COMPUTER STUDIES}, author={Majikes, John and Brugarolas, Rita and Winters, Michael and Yuschak, Sherrie and Mealin, Sean and Walker, Katherine and Yang, Pu and Sherman, Barbara and Bozkurt, Alper and Roberts, David L.}, year={2017}, month={Feb}, pages={179–195} } @article{mealin_dominguez_roberts_2016, title={Semi-supervised Classification of Static Canine Postures Using the Microsoft Kinect}, DOI={10.1145/2995257.3012024}, abstractNote={3D sensing hardware, such as the Microsoft Kinect, allows new interaction paradigms that would be difficult to accomplish with traditional RGB cameras alone. One basic step in realizing these new methods of animal-computer interaction is posture and behavior detection and classification. In this paper, we present a system capable of identifying static postures for canines that does not rely on hand-labeled data at any point during the process.
We create a model of the canine based on measurements automatically obtained from the first few captured frames, reducing the burden on users. We also present a preliminary evaluation of the system with five dogs, which shows that the system can identify the "standing," "sitting," and "lying" postures with approximately 70%, 69%, and 94% accuracy, respectively.}, journal={PROCEEDINGS OF THE THIRD INTERNATIONAL CONFERENCE ON ANIMAL-COMPUTER INTERACTION, ACI 2016}, author={Mealin, Sean and Dominguez, Ignacio X. and Roberts, David L.}, year={2016} } @inproceedings{majikes_mealin_rita_walker_yuschak_sherman_bozkurt_roberts_2016, title={Smart connected canines: IoT design considerations for the lab, home, and mission-critical environments (invited paper)}, DOI={10.1109/sarnof.2016.7846739}, abstractNote={The canine-human relationship continues to grow as dogs become an increasingly critical part of our society. As reliance on dogs has expanded from simple companionship to service work, urban security, and national defense, the opportunities for enhanced communication between the working canine and their handler increase. Wireless sensor networks and the Internet of Things (IoT) can extend traditional canine-human communication to integrate canines into the cyber-enabled world. This is what we call the Smart Connected Canine (SCC). Canine-computer interaction is sufficiently different from human-computer interaction so as to present some challenging research and design problems. There are physical and performance limits to what a dog will naturally tolerate. There are communications requirements for monitoring dogs, monitoring the environment, and for canine-human communications. Depending on the working environment, there are different performance, security, and ergonomic considerations. This paper summarizes three example canine-human systems we presented earlier, along with their IoT data characteristics and design criteria, in order to explore how smart connected canines can improve our lives, the future of smart connected canines, and the requirements on IoT technologies to facilitate this future.}, booktitle={2016 IEEE 37th Sarnoff Symposium}, author={Majikes, J. J. and Mealin, S. and Rita, B. and Walker, K. and Yuschak, S. and Sherman, B. and Bozkurt, A. and Roberts, D. L.}, year={2016}, pages={118–123} } @inproceedings{mealin_howell_roberts_2016, title={Towards unsupervised canine posture classification via depth shadow detection and infrared reconstruction for improved image segmentation accuracy}, volume={9793}, booktitle={Biomimetic and Biohybrid Systems, Living Machines 2016}, author={Mealin, S. and Howell, S. and Roberts, D. L.}, year={2016}, pages={155–166} } @article{winters_brugarolas_majikes_mealin_yuschak_sherman_bozkurt_roberts_2015, title={Knowledge Engineering for Unsupervised Canine Posture Detection from IMU Data}, DOI={10.1145/2832932.2837015}, abstractNote={Training animals is a process that requires a significant investment of time and energy on the part of the trainer. One of the most basic training tasks is to train dogs to perform postures on cue. While it might be easy for a human trainer to see when an animal has performed the desired posture, it is much more difficult for a computer to determine this. Most work in this area uses accelerometer and/or gyroscopic data to capture an animal's current state, but this has limitations. Take, for example, a normal standing posture.
From an accelerometer's perspective, it closely resembles the "laying down" posture, but the posture can look very different depending on whether the animal is standing still, walking, or running, and might look completely different from a "standing on incline" posture. A human trainer can instantly tell the difference between these postures and behaviors, but the process is much more difficult for a computer. This paper demonstrates several algorithms for recognizing canine postures, as well as a system for building a computational model of a canine's potential postures, based solely on skeletal measurements. Existing techniques use labeled data, which can be difficult to acquire. We contribute a new technique for unsupervised posture detection, and compare the supervised technique to our new, unsupervised technique. Results indicate that the supervised technique performs with a mean 82.06% accuracy, while our unsupervised approach achieves a mean 74.25% accuracy, indicating that in some cases, our new unsupervised technique is capable of achieving comparable performance.}, journal={12TH ADVANCES IN COMPUTER ENTERTAINMENT TECHNOLOGY CONFERENCE (ACE15)}, author={Winters, Michael and Brugarolas, Rita and Majikes, John and Mealin, Sean and Yuschak, Sherrie and Sherman, Barbara L. and Bozkurt, Alper and Roberts, David}, year={2015} } @article{mealin_winters_dominguez_marrero-garcia_bozkurt_sherman_roberts_2015, title={Towards the Non-Visual Monitoring of Canine Physiology in Real-Time by Blind Handlers}, DOI={10.1145/2832932.2837018}, abstractNote={One of the challenges of working with canines is that whereas humans are primarily vocal communicators, canines are primarily postural and behavioral communicators. It can take years to gain some level of proficiency at reading canine body language, even under the best of circumstances. In the case of guide dogs and visually-impaired handlers, this task is even more difficult. Luckily, new technology designed to help monitor canines may prove useful in helping handlers, especially those with visual impairments, to better understand and interpret what their working partners are feeling or saying. In prior work, a lightweight, wearable, wireless physiological monitoring system was shown to be accurate for measuring canines' heart and respiratory rates [6]. In this paper, we consider the complementary problem of communicating physiological information to handlers. We introduce two non-visual interfaces for monitoring a canine's heart and respiratory rates: an audio interface and a vibrotactile interface. We also present the results of two initial studies to evaluate the efficacy of the interfaces. In the first study, we found that many participants were more confident in detecting changes in heart and respiratory rate using the audio interface; however, most of the time they were just as accurate with the vibrotactile interface, with only a slight increase in detection latency.}, journal={12TH ADVANCES IN COMPUTER ENTERTAINMENT TECHNOLOGY CONFERENCE (ACE15)}, author={Mealin, Sean and Winters, Mike and Dominguez, Ignacio X. and Marrero-Garcia, Michelle and Bozkurt, Alper and Sherman, Barbara L.
and Roberts, David L.}, year={2015} } @article{bozkurt_roberts_sherman_brugarolas_mealin_majikes_yang_loftin_2014, title={Toward Cyber-Enhanced Working Dogs for Search and Rescue}, volume={29}, ISSN={["1941-1294"]}, DOI={10.1109/mis.2014.77}, abstractNote={The authors introduce the fundamental building blocks for a cyber-enabled, computer-mediated communication platform to connect human and canine intelligence to achieve a new generation of Cyber-Enhanced Working Dogs (CEWDs). The use of monitoring technologies provides handlers with real-time information about the behavior and emotional state of their CEWDs and the environments they're working in for a more intelligent canine-human collaboration. From handler to dog, haptic feedback and auditory cues are integrated to provide remote command and feedback delivery. From dog to handler, multiple inertial measurement units strategically located on a harness are used to accurately detect posture and behavior, and concurrent noninvasive photoplethysmogram and electrocardiogram sensors are used for physiological monitoring. The authors also discuss how CEWDs would be incorporated with a variety of other robotic and autonomous technologies to create next-generation intelligent emergency response systems. Using cyber-physical systems to supplement and augment the two-way information exchange between human handlers and dogs would amplify the remarkable sensory capacities of search and rescue dogs and help them save more lives.}, number={6}, journal={IEEE INTELLIGENT SYSTEMS}, author={Bozkurt, Alper and Roberts, David L. and Sherman, Barbara L. and Brugarolas, Rita and Mealin, Sean and Majikes, John and Yang, Pu and Loftin, Robert}, year={2014}, pages={32–39} } @inproceedings{mealin_murphy-hill_2012, title={An exploratory study of blind software developers}, DOI={10.1109/vlhcc.2012.6344485}, abstractNote={As a research community, we currently know very little about the challenges faced by blind software developers. Without knowing what those challenges are, the community cannot effectively address them. In this paper, we describe the first exploratory empirical study in this area, in which we conducted eight interviews with blind software developers to identify aspects of software development that are a challenge. Our results suggest that visually impaired software developers face challenges, for instance, when using screen readers to look up information while writing code. We discuss a variety of implications, including that blind software developers need additional support in discovering relevant software development tools.}, booktitle={2012 IEEE Symposium on Visual Languages and Human-Centric Computing (VL/HCC)}, author={Mealin, S. and Murphy-Hill, E.}, year={2012}, pages={71–74} }