@article{presler-marshall_heckman_stolee_2023, title={Improving Grading Outcomes in Software Engineering Projects Through Automated Contributions Summaries}, ISSN={2832-756X}, DOI={10.1109/ICSE-SEET58685.2023.00030}, abstractNote={Teaming is a key aspect of most professional software engineering positions, and consequently, team-based learning (TBL) features heavily in many undergraduate computer science (CS) and software engineering programs. However, while TBL offers many pedagogical benefits, it is not without challenges. One such challenge is assessment, as the course teaching staff must be able to accurately identify individual students’ contributions to both encourage and reward participation. In this paper, we study improvements to grading practices in the context of a CS1.5 introductory software engineering course, where assessing individual students’ contributions to weekly lab assignments is done manually by teaching assistants (TAs). We explore the impact of presenting TAs with automated summaries of individual student contributions to their team’s GitHub repository. To do so, we propose a novel algorithm and implement it in a tool, AutoVCS. We measure the impact on grading metrics in terms of grading speed, grading consistency, and TA satisfaction. We evaluate our algorithm, as implemented in AutoVCS, in a controlled experimental study on Java-based lab assignments from a recent offering of NC State University’s CS1.5 course. We find that our automated summaries help TAs grade more consistently and provide students with more actionable feedback. Although TAs grade no faster with automated summaries, they nonetheless strongly prefer grading with their support to grading without it. We conclude with recommendations for future work on improving consistency in contribution grading for student software engineering teams.}, journal={2023 IEEE/ACM 45TH INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING - SOFTWARE ENGINEERING EDUCATION AND TRAINING, ICSE-SEET}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2023}, pages={259–270} }

@article{bai_presler-marshall_price_stolee_2022, title={Check It Off: Exploring the Impact of a Checklist Intervention on the Quality of Student-authored Unit Tests}, DOI={10.1145/3502718.3524799}, abstractNote={Software testing is an essential skill for computer science students. Prior work reports that students desire support in determining what code to test and which scenarios should be tested. In response, we present a lightweight testing checklist that contains both tutorial information and testing strategies to guide students in what and how to test. To assess the impact of the testing checklist, we conducted an experimental, controlled A/B study with 32 undergraduate and graduate students. The study task was writing a test suite for an existing program. Students were given either the testing checklist (the experimental group) or a tutorial on a standard coverage tool with which they were already familiar (the control group). By analyzing the combination of student-written tests and survey responses, we found that students with the checklist performed as well as or better than the coverage tool group, suggesting a potential positive impact of the checklist (or, at minimum, a non-negative one). This is particularly noteworthy given that the control condition, the coverage tool, is the state of the practice. These findings suggest that testing tool support need not be sophisticated to be effective.}, journal={PROCEEDINGS OF THE 27TH ACM CONFERENCE ON INNOVATION AND TECHNOLOGY IN COMPUTER SCIENCE EDUCATION, ITICSE 2022, VOL 1}, author={Bai, Gina R. and Presler-Marshall, Kai and Price, Thomas W. and Stolee, Kathryn T.}, year={2022}, pages={276–282} }

@article{presler-marshall_heckman_stolee_2022, title={Identifying Struggling Teams in Software Engineering Courses Through Weekly Surveys}, DOI={10.1145/3478431.3499367}, abstractNote={Teaming is increasingly a core aspect of professional software engineering and most undergraduate computer science curricula. At NC State University, we teach communication and project-management skills explicitly through a junior-level software engineering course. However, some students may have a dysfunctional team experience that imperils their ability to learn these skills. Identifying these teams during a team project is important so the teaching staff can intervene early and hopefully alleviate the issues. We propose a weekly reflection survey to help the course teaching staff proactively identify teams that may not be on track to learn the course outcomes. The questions on the survey focus on team communication and collaboration over the previous week. We evaluate our survey on two semesters of the undergraduate software engineering course by comparing teams with poor end-of-project grades or peer evaluations against teams flagged on a weekly basis through the surveys. We find that the survey can identify most teams that later struggled on the project, typically by the halfway mark of the project, and thus may provide instructors with an actionable early warning about struggling teams. Furthermore, a majority of students (64.4%) found the survey to be a helpful tool for keeping their team on track. Finally, we discuss future work for improving the survey and engaging with student teams.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2022}, pages={126–132} }

@article{presler-marshall_heckman_stolee_2021, title={SQLRepair: Identifying and Repairing Mistakes in Student-Authored SQL Queries}, DOI={10.1109/ICSE-SEET52601.2021.00030}, abstractNote={Computer science educators seek to understand the types of mistakes that students make when learning a new (programming) language so that they can help students avoid those mistakes in the future. While educators know what mistakes students regularly make in languages such as C and Python, students struggle with SQL and regularly make mistakes when working with it. We present an analysis of mistakes that students made when first working with SQL, classify the types of errors introduced, and provide suggestions on how to avoid them going forward. In addition, we present an automated tool, SQLRepair, that is capable of repairing errors introduced by undergraduate programmers when writing SQL queries. Our results show that students find repairs produced by our tool comparable in understandability to queries written by themselves or by other students, suggesting that SQL repair tools may be useful in an educational context. We also provide the community with a benchmark of SQL queries written by the students in our study, which we used to evaluate SQLRepair.}, journal={2021 IEEE/ACM 43RD INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING: JOINT TRACK ON SOFTWARE ENGINEERING EDUCATION AND TRAINING (ICSE-JSEET 2021)}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2021}, pages={199–210} }

@article{presler-marshall_2021, title={Towards Better Support for Undergraduate Software Engineering Teams}, DOI={10.1145/3446871.3469773}, abstractNote={Team-based projects are increasingly used within software engineering education because they can teach valuable communication and collaboration skills that help prepare students for professional software engineering positions. However, team-based projects are not without their downsides: in particular, poor communication or a lack of participation can endanger the success of the project. We propose identifying metrics and building a predictive model to help instructors detect when teams are facing harmful dynamics, along with evaluations to assess the metrics and their impact on teams in undergraduate software engineering courses.}, journal={ICER 2021: PROCEEDINGS OF THE 17TH ACM CONFERENCE ON INTERNATIONAL COMPUTING EDUCATION RESEARCH}, author={Presler-Marshall, Kai}, year={2021}, pages={405–406} }