@article{tabarsi_yasir_reichert_tian_gadireddy_dimarco_briceno_barnes_2025,
  title         = {Herald of Advancement, Disruption, or Both: A Systematic Literature Review on Student-Facing {LLM} Tools in Undergraduate Computing Education},
  journal       = {TechRxiv},
  doi           = {10.36227/techrxiv.176463808.80840600/v1},
  abstractNote  = {The variety of help that large language models (LLMs) provide has made them popular among students across fields. Computing education has been particularly affected, as LLMs can handle coding tasks effectively and provide feedback. This has raised hop},
  author        = {Tabarsi, Benyamin and Yasir, Tahreem and Reichert, Heidi and Tian, Xiaoyi and Gadireddy, Shiva and Dimarco, Clara and Briceno, Daniel and Barnes, Tiffany},
  year          = {2025},
  month         = dec,
  note          = {Preprint},
  internal-note = {abstractNote is truncated at export ("...raised hop"); restore from the source record. "Dimarco" here vs. "DiMarco" in the caeai entry -- confirm against bylines.},
}

@inproceedings{riahi_tian_limke_storozhevykh_cateté_barnes_lytle_singh_2025,
  title         = {{SnapClass}: An {AI}-Enhanced Classroom Management System for Block-Based Programming},
  booktitle     = {Proceedings of the 2025 {IEEE} Symposium on Visual Languages and Human-Centric Computing ({VL/HCC})},
  doi           = {10.1109/vl-hcc65237.2025.00072},
  abstractNote  = {Block-Based Programming (BBP) platforms, such as Snap!, have become increasingly prominent in {K-12} computer science education due to their ability to simplify programming concepts and foster computational thinking from an early age. While these platforms engage students through visual and gamified interfaces, teachers often face challenges in using them effectively and finding all the necessary features for classroom management. To address these challenges, we introduce SnapClass, a classroom management system integrated within the Snap! programming environment. SnapClass was iteratively developed drawing on established research about the pedagogical and logistical challenges teachers encounter in computing classrooms. Specifically, SnapClass allows educators to create and customize block-based coding assignments based on student skill levels, implement rubric-based auto-grading, and access student code history and recovery features. It also supports monitoring student engagement and idle time, and includes a help dashboard with a ``raise hand'' feature to assist students in real time. This paper describes the design and key features of SnapClass those are developed and those are under progress.},
  author        = {Riahi, Bahare and Tian, Xiaoyi and Limke, Ally and Storozhevykh, Viktoriia and Catet{\'e}, Veronica and Barnes, Tiffany and Lytle, Nicholas and Singh, Khushbu},
  year          = {2025},
  month         = oct,
  internal-note = {booktitle inferred from the IEEE VL/HCC DOI prefix -- confirm exact proceedings title and add pages. Citation key contains non-ASCII "é"; safe for Biber but not classic BibTeX.},
}

@article{tithi_ramesh_dimarco_tian_alam_fazeli_barnes_2025,
  title        = {The promise and limits of {LLMs} in constructing proofs and hints for logic problems in intelligent tutoring systems},
  journal      = {Computers and Education: Artificial Intelligence},
  doi          = {10.1016/j.caeai.2025.100490},
  abstractNote = {Intelligent tutoring systems have demonstrated effectiveness in teaching formal propositional logic proofs, but their reliance on template-based explanations limits their ability to provide personalized student feedback. While large language models (LLMs) offer promising capabilities for dynamic feedback generation, they risk producing hallucinations or pedagogically unsound explanations. We evaluated the stepwise accuracy of LLMs in constructing multi-step symbolic logic proofs, comparing six prompting techniques across four state-of-the-art LLMs on 358 propositional logic problems. Results show that DeepSeek-V3 achieved superior performance with upto 86.7\% accuracy on stepwise proof construction and excelled particularly in simpler rules. We further used the best-performing LLM to generate explanatory hints for 1050 unique student problem-solving states from a logic ITS and evaluated them on 4 criteria with both an LLM grader and human expert ratings on a 20\% sample. Our analysis finds that LLM-generated hints were 75\% accurate and rated highly by human evaluators on consistency and clarity, but did not perform as well in explaining why the hint was provided or its larger context. Our results demonstrate that LLMs may be used to augment tutoring systems with logic tutoring hints, but those hints require additional modifications to ensure accuracy and pedagogical appropriateness.},
  author       = {Tithi, Sutapa Dey and Ramesh, Arun Kumar and DiMarco, Clara and Tian, Xiaoyi and Alam, Nazia and Fazeli, Kimia and Barnes, Tiffany},
  year         = {2025},
  month        = oct,
}

@inproceedings{limke_islam_riahi_tian_hill_cateté_barnes_2025,
  title         = {What Does It Take to Support Problem Solving in Programming Classrooms? {A} New Framework from the {K-12} Teacher Perspective},
  booktitle     = {Extended Abstracts of the 2025 {CHI} Conference on Human Factors in Computing Systems ({CHI} {EA} '25)},
  doi           = {10.1145/3706599.3719763},
  author        = {Limke, Ally and Islam, Saminur and Riahi, Bahare and Tian, Xiaoyi and Hill, Marnie and Catet{\'e}, Veronica and Barnes, Tiffany},
  year          = {2025},
  month         = apr,
  internal-note = {Dropped url field that duplicated the DOI resolver; add article/page number from the ACM DL if needed. Citation key contains non-ASCII "é"; safe for Biber but not classic BibTeX.},
}