@article{rowe_asbell-clarke_baker_eagle_hicks_barnes_brown_edwards_2017, title={Assessing implicit science learning in digital games}, volume={76}, ISSN={1873-7692}, DOI={10.1016/j.chb.2017.03.043}, abstractNote={Building on the promise shown in game-based learning research, this paper explores methods for Game-Based Learning Assessments (GBLA) using a variety of educational data mining (EDM) techniques. GBLA research examines patterns of behaviors evident in game data logs for the measurement of implicit learning—the development of unarticulated knowledge that is not yet expressible on a test or formal assessment. This paper reports on the study of two digital games showing how the combination of human coding with EDM has enabled researchers to measure implicit learning of Physics. In the game Impulse, researchers combined human coding of video with educational data mining to create a set of automated detectors of students' implicit understanding of Newtonian mechanics. For Quantum Spectre, an optics puzzle game, human coding of Interaction Networks was used to identify common student errors. Findings show that several of our measures of student implicit learning within these games were significantly correlated with improvements in external post-assessments. Methods and detailed findings were different for each type of game. These results suggest GBLA shows promise for future work such as adaptive games and in-class, data-driven formative assessments, but design of the assessment mechanics must be carefully crafted for each game.}, journal={COMPUTERS IN HUMAN BEHAVIOR}, author={Rowe, Elizabeth and Asbell-Clarke, Jodi and Baker, Ryan S. and Eagle, Michael and Hicks, Andrew G. and Barnes, Tiffany M. and Brown, Rebecca A. and Edwards, Teon}, year={2017}, month={Nov}, pages={617–630} }

@article{eagle_barnes_2015, title={Exploring Missing Behaviors with Region-Level Interaction Network Coverage}, volume={9112}, ISBN={978-3-319-19772-2}, ISSN={0302-9743}, DOI={10.1007/978-3-319-19773-9_126}, abstractNote={We have used a complex network model of student-tutor interactions to derive high-level approaches to problem solving. We have also used interaction networks to evaluate between-group differences in student approaches, as well as to automatically produce both next-step and high-level hints. Students do not visit vertices within the networks uniformly; students from different experimental groups are expected to have different patterns of network exploration. In this work we explore the possibility of using frequency estimation to uncover locations in the network with differing amounts of student saturation. Identification of these regions can be used to locate specific problem approaches and strategies that would be most improved by additional student data, as well as provide a measure of confidence when comparing across networks or between groups.}, journal={ARTIFICIAL INTELLIGENCE IN EDUCATION, AIED 2015}, author={Eagle, Michael and Barnes, Tiffany}, year={2015}, pages={831–835} }

@inproceedings{eagle_barnes_2014a, title={Modeling student dropout in tutoring systems}, volume={8474}, DOI={10.1007/978-3-319-07221-0_104}, abstractNote={Intelligent tutors have been shown to be almost as effective as human tutors in supporting learning in many domains. However, the construction of intelligent tutors can be costly. One way to address this problem is to use previously collected data to generate models to provide intelligent feedback to otherwise non-personalized tutors.
In this work, we explore how we can use previously collected data to build models of student dropout over time; we define dropout as ceasing to interact with the tutor before the completion of all required tasks. We use survival analysis, a statistical method of measuring time-to-event data, to model how long we can expect students to interact with a tutor. Future work will explore ways to use these models to provide personalized feedback, with the goal of preventing students from dropping out.}, booktitle={Intelligent Tutoring Systems, ITS 2014}, author={Eagle, M. and Barnes, T.}, year={2014}, pages={676–678} }

@inproceedings{eagle_barnes_2014b, title={Survival analysis on duration data in intelligent tutors}, volume={8474}, DOI={10.1007/978-3-319-07221-0_22}, abstractNote={Effects such as student dropout and the non-normal distribution of duration data confound the exploration of tutor efficiency (time-in-tutor vs. tutor performance) in intelligent tutors. We use an accelerated failure time (AFT) model to analyze the effects of using automatically generated hints in Deep Thought, a propositional logic tutor. AFT is a branch of survival analysis, a statistical technique designed for measuring time-to-event data and accounting for participant attrition. We found that students provided with automatically generated hints were able to complete the tutor in about half the time taken by students who were not provided hints. We compare the results of survival analysis with a standard between-groups mean comparison and show how failing to take student dropout into account could lead to incorrect conclusions. We demonstrate that survival analysis is applicable to duration data collected from intelligent tutors and is particularly useful when a study experiences participant attrition.}, booktitle={Intelligent Tutoring Systems, ITS 2014}, author={Eagle, M. and Barnes, T.}, year={2014}, pages={178–187} }