@comment{Cleanup of a Web-of-Science style export: conference papers re-typed
  from @article to @inproceedings (proceedings title moved from journal to
  booktitle), JSON-array residue stripped from isbn/issn, en-dash page ranges
  changed to --, Unicode apostrophes in abstracts converted to ASCII, ALL-CAPS
  venue names normalized to title case, and the nonstandard abstractNote field
  renamed to abstract. NOTE(review): series = {Lecture Notes in Computer
  Science} is inferred from ISSN 1611-3349 and the Springer 10.1007 DOIs --
  verify against the publisher pages.}

@inproceedings{zhang_hutt_ocumpaugh_henderson_goslen_rowe_boyer_wiebe_mott_lester_2022,
  author    = {Zhang, Jiayi and Hutt, Stephen and Ocumpaugh, Jaclyn and Henderson, Nathan and Goslen, Alex and Rowe, Jonathan P. and Boyer, Kristy Elizabeth and Wiebe, Eric and Mott, Bradford and Lester, James},
  title     = {Investigating Student Interest and Engagement in Game-Based Learning Environments},
  booktitle = {Artificial Intelligence in Education, Part I},
  series    = {Lecture Notes in Computer Science},
  volume    = {13355},
  year      = {2022},
  pages     = {711--716},
  doi       = {10.1007/978-3-031-11644-5_72},
  isbn      = {978-3-031-11643-8},
  issn      = {1611-3349},
  abstract  = {As a cognitive and affective state, interest promotes engagement, facilitates self-regulated learning, and is positively associated with learning outcomes. Research has shown that interest interacts with prior knowledge, but few studies have investigated these issues in the context of adaptive game-based learning environments. Using three subscales from the User Engagement Scale, we examine data from middle school students (N = 77) who interacted with Crystal Island in their regular science class to explore the relationship between interest, knowledge, and learning. We found that interest is significantly related to performance (both knowledge assessment and game completion), suggesting that students with high interest are likely to perform better academically, but also be more engaged in the in-game objectives. These findings have implications both for designers who seek to identify students with lower interest and for those who hope to create adaptive supports.},
}

@inproceedings{goslen_carpenter_rowe_henderson_azevedo_lester_2022,
  author    = {Goslen, Alex and Carpenter, Dan and Rowe, Jonathan P. and Henderson, Nathan and Azevedo, Roger and Lester, James},
  title     = {Leveraging Student Goal Setting for Real-Time Plan Recognition in Game-Based Learning},
  booktitle = {Artificial Intelligence in Education, Part I},
  series    = {Lecture Notes in Computer Science},
  volume    = {13355},
  year      = {2022},
  pages     = {78--89},
  doi       = {10.1007/978-3-031-11644-5_7},
  isbn      = {978-3-031-11643-8},
  issn      = {1611-3349},
  abstract  = {Goal setting and planning are integral components of self-regulated learning. Many students struggle to set meaningful goals and build relevant plans. Adaptive learning environments show significant potential for scaffolding students' goal setting and planning processes. An important requirement for such scaffolding is the ability to perform student plan recognition, which involves recognizing students' goals and plans based upon the observations of their problem-solving actions. We introduce a novel plan recognition framework that leverages trace log data from student interactions within a game-based learning environment called CRYSTAL ISLAND, in which students use a drag-and-drop planning support tool that enables them to externalize their science problem-solving goals and plans prior to enacting them in the learning environment. We formalize student plan recognition in terms of two complementary tasks: (1) classifying students' selected problem-solving goals, and (2) classifying the sequences of actions that students indicate will achieve their goals. Utilizing trace log data from 144 middle school students' interactions with CRYSTAL ISLAND, we evaluate a range of machine learning models for student goal and plan recognition. All machine learning-based techniques outperform the majority baseline, with LSTMs outperforming other models for goal recognition and naive Bayes performing best for plan recognition. Results show the potential for automatically recognizing students' problem-solving goals and plans in game-based learning environments, which has implications for providing adaptive support for student self-regulated learning.},
}

@inproceedings{henderson_min_rowe_lester_2021,
  author    = {Henderson, Nathan and Min, Wookhee and Rowe, Jonathan and Lester, James},
  title     = {Enhancing Multimodal Affect Recognition with Multi-Task Affective Dynamics Modeling},
  booktitle = {2021 9th International Conference on Affective Computing and Intelligent Interaction ({ACII})},
  year      = {2021},
  doi       = {10.1109/ACII52823.2021.9597432},
  issn      = {2156-8103},
  abstract  = {Accurately recognizing students' affective states is critical for enabling adaptive learning environments to promote engagement and enhance learning outcomes. Multimodal approaches to student affect recognition capture multi-dimensional patterns of student behavior through the use of multiple data channels. An important factor in multimodal affect recognition is the context in which affect is experienced and exhibited. In this paper, we present a multimodal, multitask affect recognition framework that predicts students' future affective states as auxiliary training tasks and uses prior affective states as input features to capture bi-directional affective dynamics and enhance the training of affect recognition models. Additionally, we investigate cross-stitch networks to maintain parameterized separation between shared and task-specific representations and task-specific uncertainty-weighted loss functions for contextual modeling of student affective states. We evaluate our approach using interaction and posture data captured from students engaged with a game-based learning environment for emergency medical training. Results indicate that the affective dynamics-based approach yields significant improvements in multimodal affect recognition across four different affective states.},
}

@inproceedings{emerson_henderson_min_rowe_minogue_lester_2021,
  author    = {Emerson, Andrew and Henderson, Nathan and Min, Wookhee and Rowe, Jonathan and Minogue, James and Lester, James},
  title     = {Multimodal Trajectory Analysis of Visitor Engagement with Interactive Science Museum Exhibits},
  booktitle = {Artificial Intelligence in Education ({AIED} 2021), Part II},
  series    = {Lecture Notes in Computer Science},
  volume    = {12749},
  year      = {2021},
  pages     = {151--155},
  doi       = {10.1007/978-3-030-78270-2_27},
  isbn      = {978-3-030-78269-6},
  issn      = {1611-3349},
  abstract  = {Recent years have seen a growing interest in investigating visitor engagement in science museums with multimodal learning analytics. Visitor engagement is a multidimensional process that unfolds temporally over the course of a museum visit. In this paper, we introduce a multimodal trajectory analysis framework for modeling visitor engagement with an interactive science exhibit for environmental sustainability. We investigate trajectories of multimodal data captured during visitor interactions with the exhibit through slope-based time series analysis. Utilizing the slopes of the time series representations for each multimodal data channel, we conduct an ablation study to investigate how additional modalities lead to improved accuracy while modeling visitor engagement. We are able to enhance visitor engagement models by accounting for varying levels of visitors' science fascination, a construct integrating science interest, curiosity, and mastery goals. The results suggest that trajectory-based representations of the multimodal visitor data can serve as the foundation for visitor engagement modeling to enhance museum learning experiences.},
}

@inproceedings{henderson_rowe_mott_brawner_baker_lester_2019,
  author    = {Henderson, Nathan L. and Rowe, Jonathan P. and Mott, Bradford W. and Brawner, Keith and Baker, Ryan and Lester, James C.},
  title     = {{4D} Affect Detection: Improving Frustration Detection in Game-Based Learning with Posture-Based Temporal Data Fusion},
  booktitle = {Artificial Intelligence in Education ({AIED} 2019), Part I},
  series    = {Lecture Notes in Computer Science},
  volume    = {11625},
  year      = {2019},
  pages     = {144--156},
  doi       = {10.1007/978-3-030-23204-7_13},
  isbn      = {978-3-030-23203-0},
  issn      = {1611-3349},
  abstract  = {Recent years have seen growing interest in utilizing sensors to detect learner affect. Modeling frustration has particular significance because of its central role in learning. However, sensor-based affect detection poses important challenges. Motion-tracking cameras produce vast streams of spatial and temporal data, but relatively few systems have harnessed this data successfully to produce accurate run-time detectors of learner frustration outside of the laboratory. In this paper, we introduce a data-driven framework that leverages spatial and temporal posture data to detect learner frustration using deep neural network-based data fusion techniques. To train and validate the detectors, we utilize posture data collected with Microsoft Kinect sensors from students interacting with a game-based learning environment for emergency medical training. Ground-truth labels of learner frustration were obtained using the BROMP quantitative observation protocol. Results show that deep neural network-based late fusion techniques that combine spatial and temporal data yield significant improvements to frustration detection relative to baseline models.},
}