@article{bai_sthapit_heckman_price_stolee_2023, title={An Experience Report on Introducing Explicit Strategies into Testing Checklists for Advanced Beginners}, url={https://doi.org/10.1145/3587102.3588781}, DOI={10.1145/3587102.3588781}, abstractNote={Software testing is a critical skill for computing students, but learning and practicing testing can be challenging, particularly for beginners. A recent study suggests that a lightweight testing checklist that contains testing strategies and tutorial information could assist students in writing quality tests. However, students expressed a desire for more support in knowing how to test the code/scenario. Moreover, the potential costs and benefits of the testing checklist are not yet examined in a classroom setting. To that end, we improved the checklist by integrating explicit testing strategies to it (ETS Checklist), which provide step-by-step guidance on how to transfer semantic information from instructions to the possible testing scenarios. In this paper, we report our experiences in designing explicit strategies in unit testing, as well as adapting the ETS Checklist as optional tool support in a CS1.5 course. With the quantitative and qualitative analysis of the survey responses and lab assignment submissions generated by students, we discuss students' engagement with the ETS Checklists. Our results suggest that students who used the checklist intervention had significantly higher quality in their student-authored test code, in terms of code coverage, compared to those who did not, especially for assignments earlier in the course. We also observed students' unawareness of their need for help in writing high-quality tests.}, journal={PROCEEDINGS OF THE 2023 CONFERENCE ON INNOVATION AND TECHNOLOGY IN COMPUTER SCIENCE EDUCATION, ITICSE 2023, VOL 1}, author={Bai, Gina R. and Sthapit, Sandeep and Heckman, Sarah and Price, Thomas W. and Stolee, Kathryn T.}, year={2023}, pages={194–200} } @article{bai_sthapit_heckman_price_stolee_2023, title={An Experience Report on Introducing Explicit Strategies into Testing Checklists for Advanced Beginners}, author={Bai, Gina R and Sthapit, Sandeep and Heckman, Sarah and Price, Thomas W and Stolee, Kathryn T}, year={2023} } @article{gitinabard_gao_heckman_barnes_lynch_others_2023, title={Analysis of Student Pair Teamwork Using GitHub Activities}, volume={15}, number={1}, journal={Journal of Educational Data Mining}, author={Gitinabard, Niki and Gao, Zhikai and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin F and others}, year={2023}, pages={32–62} } @article{zahn_gransbury_heckman_battestilli_2023, title={Assessment of Self-Identified Learning Struggles in CS2 Programming Assignments}, url={https://doi.org/10.1145/3587102.3588786}, DOI={10.1145/3587102.3588786}, abstractNote={Students can have widely varying experiences while working on CS2 coding projects. Challenging experiences can lead to lower motivation and less success in completing these assignments. In this paper, we identify the common struggles CS2 students face while working on course projects and examine whether or not there is evidence of improvement in these areas of struggle between projects. While previous work has been conducted on understanding the importance of self-regulated learning to student success, it has not been fully investigated in the scope of CS2 coursework. We share our observations on investigating student struggles while working on coding projects through their self-reported response to a project reflection form. 
We apply emergent coding to identify student struggles at three points during the course and compare them against student actions in the course, such as project start times and office hours participation, to identify if students were overcoming these struggles. Through our coding and analysis we have found that while a majority of students encounter struggles with time management and debugging of failing tests, students tend to emphasize wanting to improve their time management skills in future coding assignments.}, journal={PROCEEDINGS OF THE 2023 CONFERENCE ON INNOVATION AND TECHNOLOGY IN COMPUTER SCIENCE EDUCATION, ITICSE 2023, VOL 1}, author={Zahn, Matthew and Gransbury, Isabella and Heckman, Sarah and Battestilli, Lina}, year={2023}, pages={264–270} } @inproceedings{mcgill_heckman_chytas_diaz_liut_kazakova_sanusi_shah_szabo_2023, title={Building Recommendations for Conducting Equity-Focused, High Quality K-12 Computer Science Education Research}, url={https://doi.org/10.1145/3587103.3594207}, DOI={10.1145/3587103.3594207}, abstractNote={To investigate and identify promising practices in equitable K-12 computer science (CS) education, the capacity for education researchers to conduct this research must be rapidly built globally. Simultaneously, concerns have arisen over the last few years about the quality of research that is being conducted and the lack of equity-focused research.}, booktitle={Proceedings of the 2023 Conference on Innovation and Technology in Computer Science Education V. 2}, author={McGill, Monica M. and Heckman, Sarah and Chytas, Christos and Diaz, Lien and Liut, Michael and Kazakova, Vera and Sanusi, Ismaila Temitayo and Shah, Selina Marianna and Szabo, Claudia}, year={2023}, month={Jun}, pages={565–566} } @inproceedings{mcgill_thompson_gransbury_heckman_rosato_delyser_2023, title={Building upon the CAPE Framework for Broader Understanding of Capacity in K-12 CS Education}, url={https://doi.org/10.1145/3545945.3569799}, DOI={10.1145/3545945.3569799}, abstractNote={Research Problem. The CAPE Framework has been used in multiple studies to situate capacity-building efforts within schools to offer equitable student access to and participation in K-12 computer science (CS) education. CAPE defines four major components of capacity, access, participation and experience. However, to define what each of the CAPE components can entail, well-defined subcomponents are needed.}, booktitle={Proceedings of the 54th ACM Technical Symposium on Computer Science Education V. 1}, author={McGill, Monica M. and Thompson, Angelica and Gransbury, Isabella and Heckman, Sarah and Rosato, Jennifer and Delyser, Leigh Ann}, year={2023}, month={Mar}, pages={577–582} } @article{presler-marshall_heckman_stolee_2023, title={Improving Grading Outcomes in Software Engineering Projects Through Automated Contributions Summaries}, ISSN={["2832-756X"]}, DOI={10.1109/ICSE-SEET58685.2023.00030}, abstractNote={Teaming is a key aspect of most professional software engineering positions, and consequently, team-based learning (TBL) features heavily in many undergraduate computer science (CS) and software engineering programs. However, while TBL offers many pedagogical benefits, it is not without challenges. One such challenge is assessment, as the course teaching staff must be able to accurately identify individual students’ contributions to both encourage and reward participation. 
In this paper, we study improvements to grading practises in the context of a CS1.5 introductory software engineering course, where assessing individual students’ contributions to weekly lab assignments is done manually by teaching assistants (TAs). We explore the impact of presenting TAs with automated summaries of individual student contributions to their team’s GitHub repository. To do so, we propose a novel algorithm, and implement a tool based off of it, AutoVCS. We measure the impact on grading metrics in terms of grading speed, grading consistency, and TA satisfaction. We evaluate our algorithm, as implemented in AutoVCS, in a controlled experimental study on Java-based lab assignments from a recent offering of NC State University’s CS1.5 course. We find our automated summaries help TAs grade more consistently and provides students with more actionable feedback. Although TAs grade no faster using automated summaries, they nonetheless strongly prefer grading with the support of them than without. We conclude with recommendations for future work to explore improving consistency in contribution grading for student software engineering teams.}, journal={2023 IEEE/ACM 45TH INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING-SOFTWARE ENGINEERING EDUCATION AND TRAINING, ICSE-SEET}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2023}, pages={259–270} } @inproceedings{gao_lynch_heckman_2023, title={Too long to wait and not much to do: Modeling student behaviors while waiting for help in online office hours.}, booktitle={Proceedings of the 7th Educational Data Mining in Computer Science Education (CSEDM) Workshop}, author={Gao, Zhikai and Lynch, Collin and Heckman, Sarah}, year={2023} } @article{heckman_carver_sherriff_al-zubidy_2022, title={A Systematic Literature Review of Empiricism and Norms of Reporting in Computing Education Research Literature}, url={https://doi.org/10.1145/3470652}, DOI={10.1145/3470652}, abstractNote={Computing Education Research (CER) is critical for supporting the increasing number of students who need to learn computing skills. To systematically advance knowledge, publications must be clear enough to support replications, meta-analyses, and theory-building. The goal of this study is to characterize the reporting of empiricism in CER literature by identifying whether publications include information to support replications, meta-analyses, and theory building. The research questions are: RQ1) What percentage of papers in CER venues have empirical evaluation? RQ2) What are the characteristics of the empirical evaluation? RQ3) Do the papers with empirical evaluation follow reporting norms (both for inclusion and for labeling of key information)? We conducted an SLR of 427 papers published during 2014 and 2015 in five CER venues: SIGCSE TS, ICER, ITiCSE, TOCE, and CSE. We developed and applied the CER Empiricism Assessment Rubric. Over 80% of papers had some form of empirical evaluation. Quantitative evaluation methods were the most frequent. Papers most frequently reported results on interventions around pedagogical techniques, curriculum, community, or tools. There was a split in papers that had some type of comparison between an intervention and some other data set or baseline. Many papers lacked properly reported research objectives, goals, research questions, or hypotheses, description of participants, study design, data collection, and threats to validity. 
CER authors are contributing empirical results to the literature; however, not all norms for reporting are met. We encourage authors to provide clear, labeled details about their work so readers can use the methodologies and results for replications and meta-analyses. As our community grows, our reporting of CER should mature to help establish computing education theory to support the next generation of computing learners.}, journal={ACM Transactions on Computing Education}, author={Heckman, Sarah and Carver, Jeffrey C. and Sherriff, Mark and Al-zubidy, Ahmed}, year={2022}, month={Mar} } @inproceedings{battestilli_zahn_heckman_2022, title={Academic Help Seeking Patterns in Introductory Computer Science Courses}, booktitle={2022 ASEE Annual Conference & Exposition}, author={Battestilli, Lina and Zahn, Matthew and Heckman, Sarah}, year={2022} } @inproceedings{heckman_minnes_2022, title={Academic Middle Management: Undergraduate Leadership in Computing Programs}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education V. 2}, author={Heckman, Sarah and Minnes, Mia}, year={2022}, pages={1184–1184} } @inproceedings{gao_erickson_xu_lynch_heckman_barnes_2022, title={Admitting you have a problem is the first step: Modeling when and why students seek help in programming assignments}, booktitle={Proceedings of the 15th International Conference on Educational Data Mining, A. Mitrovic and N. Bosch, Eds. International Educational Data Mining Society, Durham, United Kingdom}, author={Gao, Zhikai and Erickson, Bradley and Xu, Yiqiao and Lynch, Collin and Heckman, Sarah and Barnes, Tiffany}, year={2022}, pages={508–514} } @article{erickson_heckman_lynch_2022, title={Characterizing Student Development Progress: Validating Student Adherence to Project Milestones}, DOI={10.1145/3478431.3499373}, abstractNote={As enrollment in CS programs have risen, it has become increasingly difficult for teaching staff to provide timely and detailed guidance on student projects. To address this, instructors use automated assessment tools to evaluate students' code and processes as they work. Even with automation, understanding students' progress, and more importantly, if students are making the 'right' progress toward the solution is challenging at scale. To help students manage their time and learn good software engineering processes, instructors may create intermediate deadlines, or milestones, to support progress. However, student's adherence to these processes is opaque and may hinder student success and instructional support. 
Better understanding of how students follow process guidance in practice is needed to identify the right assignment structures to support development of high-quality process skills.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Erickson, Bradley and Heckman, Sarah and Lynch, Collin F.}, year={2022}, pages={15–21} } @inproceedings{erickson_heckman_lynch_2022, title={Characterizing Student Development Progress: Validating Student Adherence to Project Milestones}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education-Volume 1}, author={Erickson, Bradley and Heckman, Sarah and Lynch, Collin F}, year={2022}, pages={15–21} } @article{gitinabard_heckman_barnes_lynch_2022, title={Designing a Dashboard for Student Teamwork Analysis}, DOI={10.1145/3478431.3499377}, abstractNote={Classroom dashboards are designed to help instructors effectively orchestrate classrooms by providing summary statistics, activity tracking, and other information. Existing dashboards are generally specific to an LMS or platform and they generally summarize individual work, not group behaviors. However, CS courses typically involve constellations of tools and mix on- and offline collaboration. Thus, cross-platform monitoring of individuals and teams is important to develop a full picture of the class. In this work, we describe our work on Concert, a data integration platform that collects data about student activities from several sources such as Piazza, My Digital Hand, and GitHub and uses it to support classroom monitoring through analysis and visualizations. We discuss team visualizations that we have developed to support effective group management and to help instructors identify teams in need of intervention.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Gitinabard, Niki and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin}, year={2022}, pages={446–452} } @inproceedings{gitinabard_heckman_barnes_lynch_2022, title={Designing a dashboard for student teamwork analysis}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education-Volume 1}, author={Gitinabard, Niki and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin}, year={2022}, pages={446–452} } @inproceedings{mannekote_celepkolu_galdo_boyer_israel_heckman_stephens-martinez_2022, title={Don't Just Paste Your Stacktrace: Shaping Discussion Forums in Introductory CS Courses}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education V. 2}, author={Mannekote, Amogh and Celepkolu, Mehmet and Galdo, Aisha Chung and Boyer, Kristy Elizabeth and Israel, Maya and Heckman, Sarah and Stephens-Martinez, Kristin}, year={2022}, pages={1164–1164} } @article{presler-marshall_heckman_stolee_2022, title={Identifying Struggling Teams in Software Engineering Courses Through Weekly Surveys}, DOI={10.1145/3478431.3499367}, abstractNote={Teaming is increasingly a core aspect of professional software engineering and most undergraduate computer science curricula. At NC State University, we teach communication and project-management skills explicitly through a junior-level software engineering course. However, some students may have a dysfunctional team experience that imperils their ability to learn these skills.
Identifying these teams during a team project is important so the teaching staff can intervene early and hopefully alleviate the issues.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2022}, pages={126–132} } @inproceedings{presler-marshall_heckman_stolee_2022, title={Identifying struggling teams in software engineering courses through weekly surveys}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education-Volume 1}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T}, year={2022}, pages={126–132} } @inproceedings{zahn_heckman_2022, title={Observations on Student Help-Seeking Behaviors in Introductory Computer Science Courses}, url={https://doi.org/10.1145/3545947.3576325}, DOI={10.1145/3545947.3576325}, abstractNote={The help-seeking interactions faculty encounter will vary depending upon the course structure and the students enrolled. While the course structure tends to remain the same, the students enrolled change each semester, presenting a new set of students who seek help in many different ways. We share our observations in investigating student behavior when using course resources, including office hours and online discussion forums, in two introductory computer science courses. Our goal is to explore differences in help-seeking behavior to construct Student "Help-Seeking" Personas. Preliminary analysis has shown that, for these two introductory CS courses, there are no well-defined personas that emerge from the grouping of help-seeking behaviors. The demographic of students exhibiting various help-seeking behaviors tends to be a near-proportionate subset of the overall course demographic. Thus, no distinct personas emerge from the students' help-seeking behaviors in introductory CS courses that faculty can utilize to better understand their students.}, booktitle={Proceedings of the 54th ACM Technical Symposium on Computer Science Education V. 2}, author={Zahn, Matthew and Heckman, Sarah}, year={2022}, month={Mar}, pages={1380–1380} } @article{carver_heckman_sherriff_2022, title={Training Computing Educators to Become Computing Education Researchers}, DOI={10.1145/3478431.3499297}, abstractNote={The computing education community endeavors to consistently move forward, improving the educational experience of our students. As new innovations in computing education practice are learned and shared, however, these papers may not exhibit the desired qualities that move simple experience reports to true Scholarship of Teaching and Learning (SoTL). We report on our six years of experience in running professional development for computing educators in empirical research methods for social and behavioral studies in the classroom. Our goal is to have a direct impact on instructors who are in the beginning stages of transitioning their educational innovations from anecdotal to empirical results that can be replicated by instructors at other institutions. To achieve this, we created a year-long mentoring experience, beginning with a multi-day workshop on empirical research methods during the summer, followed by regular mentoring sessions with participants, and culminating in a follow-up session at the following year's SIGCSE Technical Symposium. 
From survey results and as evidenced by eventual research results and publications from participants, we believe that our method of structuring empirical research professional development was successful and could be a model for similar programs in other areas.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Carver, Jeffrey C. and Heckman, Sarah and Sherriff, Mark}, year={2022}, pages={724–730} } @inproceedings{carver_heckman_sherriff_2022, title={Training computing educators to become computing education researchers}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education-Volume 1}, author={Carver, Jeffrey C and Heckman, Sarah and Sherriff, Mark}, year={2022}, pages={724–730} } @inproceedings{presler-marshall_heckman_stolee_2022, title={What Makes Team[s] Work? A Study of Team Characteristics in Software Engineering Projects}, booktitle={Proceedings of the 2022 ACM Conference on International Computing Education Research-Volume 1}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T}, year={2022}, pages={177–188} } @article{gao_heckman_lynch_2022, title={Who Uses Office Hours? A Comparison of In-Person and Virtual Office Hours Utilization}, DOI={10.1145/3478431.3499334}, abstractNote={In Computer Science (CS) education, instructors use office hours for one-on-one help-seeking. Prior work has shown that traditional in-person office hours may be underutilized. In response many instructors are adding or transitioning to virtual office hours. Our research focuses on comparing in-person and online office hours to investigate differences between performance, interaction time, and the characteristics of the students who utilize in-person and virtual office hours. We analyze a rich dataset covering two semesters of a CS2 course which used in-person office hours in Fall 2019 and virtual office hours in Fall 2020. Our data covers students' use of office hours, the nature of their questions, and the time spent receiving help as well as demographic and attitude data. Our results show no relationship between student's attendance in office hours and class performance. However we found that female students attended office hours more frequently, as did students with a fixed mindset in computing, and those with weaker skills in transferring theory to practice. We also found that students with low confidence in or low enjoyment toward CS were more active in virtual office hours. Finally, we observed a significant correlation between students attending virtual office hours and an increased interest in CS study; while students attending in-person office hours tend to show an increase in their growth mindset.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Gao, Zhikai and Heckman, Sarah and Lynch, Collin}, year={2022}, pages={300–306} } @inproceedings{gao_heckman_lynch_2022, title={Who uses office hours? a comparison of in-person and virtual office hours utilization}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education-Volume 1}, author={Gao, Zhikai and Heckman, Sarah and Lynch, Collin}, year={2022}, pages={300–306} } @article{gao_erickson_xu_lynch_heckman_barnes_others_2022, title={You asked, now what?
Modeling Students' Help-Seeking and Coding actions from Request to Resolution}, volume={14}, number={3}, journal={Journal of Educational Data Mining}, author={Gao, Zhikai and Erickson, Bradley and Xu, Yiqiao and Lynch, Collin and Heckman, Sarah and Barnes, Tiffany and others}, year={2022}, pages={109–131} } @article{heckman_carver_sherriff_al-zubidy_2021, title={A Systematic Literature Review of Empiricism and Norms of Reporting in Computing Education Research Literature}, volume={22}, number={1}, journal={ACM Transactions on Computing Education (TOCE)}, publisher={ACM New York, NY}, author={Heckman, Sarah and Carver, Jeffrey C and Sherriff, Mark and Al-Zubidy, Ahmed}, year={2021}, pages={1–46} } @article{gao_lynch_heckman_barnes_2021, title={Automatically Classifying Student Help Requests: A Multi-Year Analysis.}, journal={International Educational Data Mining Society}, publisher={International Educational Data Mining Society}, author={Gao, Zhikai and Lynch, Collin and Heckman, Sarah and Barnes, Tiffany}, year={2021} } @inproceedings{basu_heckman_maher_2021, title={Online Vs Face-to-face Web-development Course: Course Strategies, Learning, and Engagement}, booktitle={Proceedings of the 52nd ACM Technical Symposium on Computer Science Education}, author={Basu, Debarati and Heckman, Sarah and Maher, Mary Lou}, year={2021}, pages={1191–1197} } @article{akintunde_limke_barnes_heckman_lynch_2021, title={PEDI - Piazza Explorer Dashboard for Intervention}, ISSN={["1943-6092"]}, DOI={10.1109/VL/HCC51201.2021.9576443}, abstractNote={Analytics about how students navigate online learning tools throughout the duration of an assignment is scarce. Knowledge about how students use online tools before a course's end could positively impact students' learning outcomes. We introduce PEDI (Piazza Explorer Dashboard for Intervention), a tool which analyzes and presents visualizations of forum activity on Piazza, a question and answer forum, to instructors. We outline the design principles and data-informed recommendations used to design PEDI. Our prior research revealed two critical periods in students' forum engagement over the duration of an assignment. Early engagement in the first half of an assignment duration positively correlates with class average performance. Whereas, extremely high engagement toward the deadline predicted lower class average performance. PEDI uses these findings to detect and flag troubling engagement levels and informs instructors through clear visualizations to promote data-informed interventions. 
By providing insights to instructors, PEDI may improve class performance and pave the way for a new generation of online tools.}, journal={2021 IEEE SYMPOSIUM ON VISUAL LANGUAGES AND HUMAN-CENTRIC COMPUTING (VL/HCC 2021)}, author={Akintunde, Ruth Okoilu and Limke, Ally and Barnes, Tiffany and Heckman, Sarah and Lynch, Collin}, year={2021} } @inproceedings{akintunde_limke_barnes_heckman_lynch_2021, title={PEDI-Piazza Explorer Dashboard for Intervention}, booktitle={2021 IEEE Symposium on Visual Languages and Human-Centric Computing (VL/HCC)}, author={Akintunde, Ruth Okoilu and Limke, Ally and Barnes, Tiffany and Heckman, Sarah and Lynch, Collin}, year={2021}, pages={1–4} } @article{presler-marshall_heckman_stolee_2021, title={SQLRepair: Identifying and Repairing Mistakes in Student-Authored SQL Queries}, DOI={10.1109/ICSE-SEET52601.2021.00030}, abstractNote={Computer science educators seek to understand the types of mistakes that students make when learning a new (programming) language so that they can help students avoid those mistakes in the future. While educators know what mistakes students regularly make in languages such as C and Python, students struggle with SQL and regularly make mistakes when working with it. We present an analysis of mistakes that students made when first working with SQL, classify the types of errors introduced, and provide suggestions on how to avoid them going forward. In addition, we present an automated tool, SQLRepair, that is capable of repairing errors introduced by undergraduate programmers when writing SQL queries. Our results show that students find repairs produced by our tool comparable in understandability to queries written by themselves or by other students, suggesting that SQL repair tools may be useful in an educational context. We also provide to the community a benchmark of SQL queries written by the students in our study that we used for evaluation of SQLRepair.}, journal={2021 IEEE/ACM 43RD INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING: JOINT TRACK ON SOFTWARE ENGINEERING EDUCATION AND TRAINING (ICSE-JSEET 2021)}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2021}, pages={199–210} } @inproceedings{presler-marshall_heckman_stolee_2021, title={SQLRepair: Identifying and Repairing Mistakes in Student-Authored SQL Queries}, booktitle={2021 IEEE/ACM 43rd International Conference on Software Engineering: Software Engineering Education and Training (ICSE-SEET)}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T}, year={2021}, pages={199–210} } @article{heckman_schmidt_king_2020, title={Integrating Testing Throughout the CS Curriculum}, ISSN={["2159-4848"]}, DOI={10.1109/ICSTW50294.2020.00079}, abstractNote={Software testing is a critical component of any software development lifecycle, but becoming an experienced software tester requires understanding many strategies for writing high-quality test cases and a significant amount of practice. Situated learning theory suggests that students should be exposed to things they would see in a professional workplace. In terms of software testing, students should be exposed to real-world software testing practices in a variety of contexts, from the simplest of programs to the very complex. The goal of this paper is to share our experience integrating software testing into our undergraduate curriculum at North Carolina State University. 
In this paper, we discuss how software testing is taught in our CS1 - Introductory Programming, CS2 - Software Development Fundamentals, and several other courses beyond CS2. Over the past 10 years of teaching software testing in introductory programming courses, we discuss lessons learned and highlight open concerns for future research.}, journal={2020 IEEE 13TH INTERNATIONAL CONFERENCE ON SOFTWARE TESTING, VERIFICATION AND VALIDATION WORKSHOPS (ICSTW)}, author={Heckman, Sarah and Schmidt, Jessica Young and King, Jason}, year={2020}, pages={441–444} } @inproceedings{heckman_schmidt_king_2020, title={Integrating Testing Throughout the CS Curriculum}, booktitle={2020 IEEE International Conference on Software Testing, Verification and Validation Workshops (ICSTW)}, author={Heckman, Sarah and Schmidt, Jessica Young and King, Jason}, year={2020}, pages={441–444} } @article{gitinabard_okoilu_xu_heckman_barnes_lynch_2020, title={Student Teamwork on Programming Projects: What can GitHub logs show us?}, journal={arXiv preprint arXiv:2008.11262}, author={Gitinabard, Niki and Okoilu, Ruth and Xu, Yiqiao and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin}, year={2020} } @article{heckman_fain_pérez-quiñones_2019, title={Building and expanding a successful undergraduate research program}, volume={35}, number={4}, journal={Journal of Computing Sciences in Colleges}, publisher={Consortium for Computing Sciences in Colleges}, author={Heckman, Sarah and Fain, Brandon and Pérez-Quiñones, Manuel}, year={2019}, pages={18–19} } @article{gitinabard_xu_heckman_barnes_lynch_2019, title={How Widely Can Prediction Models Be Generalized? Performance Prediction in Blended Courses}, volume={12}, ISSN={["1939-1382"]}, url={https://doi.org/10.1109/TLT.2019.2911832}, DOI={10.1109/TLT.2019.2911832}, abstractNote={Blended courses that mix in-person instruction with online platforms are increasingly popular in secondary education. These tools record a rich amount of data on students' study habits and social interactions. Prior research has shown that these metrics are correlated with students' performance in face to face classes. However, predictive models for blended courses are still limited and have not yet succeeded at early prediction or cross-class predictions even for repeated offerings of the same course. In this work, we use data from two offerings of two different undergraduate courses to train and evaluate predictive models on student performance based upon persistent student characteristics including study habits and social interactions. We analyze the performance of these models on the same offering, on different offerings of the same course, and across courses to see how well they generalize. We also evaluate the models on different segments of the courses to determine how early reliable predictions can be made. This work tells us in part how much data is required to make robust predictions and how cross-class data may be used, or not, to boost model performance. The results of this study will help us better understand how similar the study habits, social activities, and the teamwork styles are across semesters for students in each performance category. 
These trained models also provide an avenue to improve our existing support platforms to better support struggling students early in the semester with the goal of providing timely intervention.}, number={2}, journal={IEEE TRANSACTIONS ON LEARNING TECHNOLOGIES}, publisher={Institute of Electrical and Electronics Engineers (IEEE)}, author={Gitinabard, Niki and Xu, Yiqiao and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin F.}, year={2019}, pages={184–197} } @article{hawthorne_pérez-quiñones_heckman_zhang_2019, title={SIGCSE technical symposium 2019 report}, volume={51}, number={2}, journal={ACM SIGCSE Bulletin}, publisher={ACM}, author={Hawthorne, Elizabeth K and Pérez-Quiñones, Manuel A and Heckman, Sarah and Zhang, Jian}, year={2019}, pages={2–4} } @article{zhang_sherriff_heckman_cutter_monge_2019, title={SIGCSE technical symposium 2020 call for submissions}, volume={51}, number={3}, journal={ACM SIGCSE Bulletin}, publisher={ACM}, author={Zhang, Jian and Sherriff, Mark and Heckman, Sarah and Cutter, Pam and Monge, Alvaro}, year={2019}, pages={2–3} } @inproceedings{presler-marshall_horton_heckman_stolee_2019, title={Wait, Wait. No, Tell Me. Analyzing Selenium Configuration Effects on Test Flakiness}, booktitle={2019 IEEE/ACM 14th International Workshop on Automation of Software Test (AST)}, author={Presler-Marshall, Kai and Horton, Eric and Heckman, Sarah and Stolee, Kathryn}, year={2019}, pages={7–13} } @article{gitinabard_heckman_barnes_lynch_2019, title={What will you do next? A sequence analysis on the student transitions between online platforms in blended courses}, journal={arXiv preprint arXiv:1905.00928}, author={Gitinabard, Niki and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin F}, year={2019} } @inproceedings{heckman_stolee_parnin_2018, title={10+ years of teaching software engineering with itrust: the good, the bad, and the ugly}, booktitle={Proceedings of the 40th International Conference on Software Engineering: Software Engineering Education and Training}, author={Heckman, Sarah and Stolee, Kathryn T and Parnin, Christopher}, year={2018}, pages={1–4} } @article{heckman_stolee_parnin_2018, title={10+Years of Teaching Software Engineering with iTrust: the Good, the Bad, and the Ugly}, ISSN={["0270-5257"]}, DOI={10.1145/3183377.3183393}, abstractNote={This paper presents an experience report with a junior-level software engineering course at North Carolina State University. We provide an overview of the course structure and the course project, iTrust, that has been developed by students over 25 semesters. We summarize reflections from faculty, teaching assistants, and students (through course evaluations). From our lessons learned, we present our course improvements as we prepare for the next ten years of software engineering courses. Our main lessons learned are 1) course technologies have a lifespan and require periodic updating to balance student learning and working with a legacy system; 2) teaching assistant longevity and support is critical to course success; and 3) the value of working with a large, legacy system in a semester long course is supported by faculty, teaching assistants, and eventually students.}, journal={2018 IEEE/ACM 40TH INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING: SOFTWARE ENGINEERING EDUCATION AND TRAINING (ICSE-SEET)}, author={Heckman, Sarah and Stolee, Kathryn T. 
and Parnin, Christopher}, year={2018}, pages={1–4} } @article{sherriff_heckman_2018, title={Capstones and large projects in computing education}, volume={18}, number={2}, journal={ACM Transactions on Computing Education (TOCE)}, publisher={ACM New York, NY, USA}, author={Sherriff, Mark and Heckman, Sarah}, year={2018}, pages={1–4} } @article{carver_heckman_sherriff_2018, title={Designing Empirical Education Research Studies (DEERS): Creating an Answerable Research Question}, DOI={10.1145/3159450.3162350}, journal={SIGCSE'18: PROCEEDINGS OF THE 49TH ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION}, author={Carver, Jeffrey C. and Heckman, Sarah and Sherriff, Mark}, year={2018}, pages={1051–1051} } @inproceedings{carver_heckman_sherriff_2018, title={Designing Empirical Education Research Studies (DEERS): Creating an Answerable Research Question}, booktitle={Proceedings of the 49th ACM Technical Symposium on Computer Science Education}, author={Carver, Jeffrey C and Heckman, Sarah and Sherriff, Mark}, year={2018}, pages={1051–1051} } @article{heckman_king_2018, place={New York, NY, USA}, title={Developing Software Engineering Skills using Real Tools for Automated Grading}, DOI={10.1145/3159450.3159595}, abstractNote={Situated learning theory supports engaging students with materials and resources that reflect professional standards and best practices. Starting with our introductory courses, we incorporate situated learning to support student engagement in software engineering practices and processes through the use of industrial strength open-source tools in several classes throughout the undergraduate computer science curriculum at NC State University. Additionally, these tools support several logistical and educational needs in computer science classrooms, including assignment submission systems and automated grading. In this tools paper, we present our Canary Framework for supporting software engineering practices through the use of Eclipse for development; GitHub for submission and collaboration; and Jenkins for continuous integration and automated grading. These tools are used in five of ten core courses by more than 3000 students over ten semesters. 
While the use of these tools in education is not unique, we want to share our model of using professional tools in a classroom setting and our experiences on how this framework can support multiple courses throughout the curriculum and at scale.}, journal={SIGCSE'18: PROCEEDINGS OF THE 49TH ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION}, publisher={ACM}, author={Heckman, Sarah and King, Jason}, year={2018}, pages={794–799} } @inproceedings{heckman_king_2018, title={Developing Software Engineering Skills using Real Tools for Automated Grading}, booktitle={Proceedings of the 49th ACM Technical Symposium on Computer Science Education}, author={Heckman, Sarah and King, Jason}, year={2018}, pages={794–799} } @inproceedings{sheshadri_gitinabard_lynch_barnes_heckman_2018, title={Predicting student performance based on online study habits: a study of blended courses}, booktitle={the 11th International Conference on Educational Data Mining (EDM 2018)}, author={Sheshadri, Adithya and Gitinabard, Niki and Lynch, Collin F and Barnes, Tiffany and Heckman, Sarah}, year={2018}, pages={87–96} } @article{heckman_zhang_peérez-quiñones_hawthorne_2018, title={SIGCSE 2019 paper length change}, volume={50}, number={2}, journal={ACM SIGCSE Bulletin}, publisher={ACM}, author={Heckman, Sarah and Zhang, Jian and Pérez-Quiñones, Manuel A and Hawthorne, Elizabeth K}, year={2018}, pages={4–4} } @article{heckman_zhang_pérez-quiñones_hawthorne_2018, title={What is a SIGCSE symposium paper?}, volume={50}, number={3}, journal={ACM SIGCSE Bulletin}, publisher={ACM}, author={Heckman, Sarah and Zhang, Jian and Pérez-Quiñones, Manuel A and Hawthorne, Elizabeth K}, year={2018}, pages={3–3} } @article{gitinabard_xue_lynch_heckman_barnes_2017, title={A Social Network Analysis on Blended Courses}, journal={arXiv preprint arXiv:1709.10215}, author={Gitinabard, Niki and Xue, Linting and Lynch, Collin F and Heckman, Sarah and Barnes, Tiffany}, year={2017} } @article{bahler_battestilli_demaria_healey_heckman_heil_lester_mott_mealin_novitsky_et al._2017, title={Conversations (oral history interviews) with members of North Carolina State University Computer Science Department by Carol Lee and Carolyn Miller}, publisher={Department of Computer Science, North Carolina State University}, author={Bahler, Dennis and Battestilli, Lina and DeMaria, Mark and Healey, Christopher and Heckman, Sarah and Heil, Margaret and Lester, James and Mott, Bradford and Mealin, Sean and Novitsky, Melissa and et al.}, year={2017} } @inproceedings{vellukunnel_buffum_boyer_forbes_heckman_mayer-patel_2017, title={Deconstructing the Discussion Forum: Student Questions and Computer Science Learning}, booktitle={Proceedings of the 2017 ACM SIGCSE Technical Symposium on Computer Science Education}, author={Vellukunnel, Mickey and Buffum, Philip and Boyer, Kristy Elizabeth and Forbes, Jeffrey and Heckman, Sarah and Mayer-Patel, Ketan}, year={2017}, pages={603–608} } @inproceedings{heckman_carver_sherriff_2017, title={Designing Empirical Education Research Studies (DEERS): Creating an Answerable Research Question}, booktitle={Proceedings of the 2017 ACM SIGCSE Technical Symposium on Computer Science Education}, author={Heckman, Sarah and Carver, Jeffrey C and Sherriff, Mark}, year={2017}, pages={737–737} } @article{gitinabard_lynch_heckman_barnes_2017, title={Identifying Student Communities in Blended Courses}, journal={arXiv preprint arXiv:1710.04129}, author={Gitinabard, Niki and Lynch, Collin F and Heckman, Sarah and Barnes, Tiffany}, year={2017} } 
@inproceedings{smith_boyer_forbes_heckman_mayer-patel_2017, title={My Digital Hand: A Tool for Scaling Up One-to-One Peer Teaching in Support of Computer Science Learning}, booktitle={Proceedings of the 2017 ACM SIGCSE Technical Symposium on Computer Science Education}, author={Smith, Aaron J and Boyer, Kristy Elizabeth and Forbes, Jeffrey and Heckman, Sarah and Mayer-Patel, Ketan}, year={2017}, pages={549–554} } @inproceedings{al-zubidy_carver_heckman_sherriff_2016, title={A (Updated) Review of Empiricism at the SIGCSE Technical Symposium}, booktitle={Proceedings of the 47th ACM Technical Symposium on Computing Science Education}, author={Al-Zubidy, Ahmed and Carver, Jeffrey C and Heckman, Sarah and Sherriff, Mark}, year={2016}, pages={120–125} } @article{johnson_pandita_smith_ford_elder_murphy-hill_heckman_sadowski_2016, title={A Cross-Tool Study on Program Analysis Tool Notification Communication}, author={Johnson, Brittany and Pandita, Rahul and Smith, Justin and Ford, Denae and Elder, Sarah and Murphy-Hill, Emerson and Heckman, Sarah and Sadowski, Caitlin}, year={2016} } @inproceedings{johnson_pandita_smith_ford_elder_murphy-hill_heckman_sadowski_2016, title={A cross-tool communication study on program analysis tool notifications}, booktitle={Proceedings of the 2016 24th ACM SIGSOFT International Symposium on Foundations of Software Engineering}, author={Johnson, Brittany and Pandita, Rahul and Smith, Justin and Ford, Denae and Elder, Sarah and Murphy-Hill, Emerson and Heckman, Sarah and Sadowski, Caitlin}, year={2016}, pages={73–84} } @inproceedings{heckman_king_2016, title={Teaching Software Engineering Skills in CS1.5: Incorporating Real-world Practices and Tools}, booktitle={Proceedings of the 47th ACM Technical Symposium on Computing Science Education}, author={Heckman, Sarah and King, Jason}, year={2016}, pages={696–697} } @inproceedings{heckman_2015, title={An Empirical Study of In-Class Laboratories on Student Learning of Linear Data Structures}, booktitle={Proceedings of the eleventh annual International Conference on International Computing Education Research}, author={Heckman, Sarah S}, year={2015}, pages={217–225} } @inproceedings{heckman_king_winters_2015, title={Automating Software Engineering Best Practices Using an Open Source Continuous Integration Framework}, booktitle={Proceedings of the 46th ACM Technical Symposium on Computer Science Education}, author={Heckman, Sarah and King, Jason and Winters, Michael}, year={2015}, pages={677–677} } @article{johnson_pandita_murphy-hill_heckman_2015, title={Bespoke Tools: Adapted to the Concepts Developers Know}, DOI={10.1145/2786805.2803197}, abstractNote={Even though different developers have varying levels of expertise, the tools in one developer's integrated development environment (IDE) behave the same as the tools in every other developers' IDE. In this paper, we propose the idea of automatically customizing development tools by modeling what a developer knows about software concepts. We then sketch three such ``bespoke'' tools and describe how development data can be used to infer what a developer knows about relevant concepts. 
Finally, we describe our ongoing efforts to make bespoke program analysis tools that customize their notifications to the developer using them.}, journal={2015 10TH JOINT MEETING OF THE EUROPEAN SOFTWARE ENGINEERING CONFERENCE AND THE ACM SIGSOFT SYMPOSIUM ON THE FOUNDATIONS OF SOFTWARE ENGINEERING (ESEC/FSE 2015) PROCEEDINGS}, author={Johnson, Brittany and Pandita, Rahul and Murphy-Hill, Emerson and Heckman, Sarah}, year={2015}, pages={878–881} } @inproceedings{johnson_pandita_murphy-hill_heckman_2015, title={Bespoke tools: adapted to the concepts developers know}, booktitle={Proceedings of the 2015 10th Joint Meeting on Foundations of Software Engineering}, author={Johnson, Brittany and Pandita, Rahul and Murphy-Hill, Emerson and Heckman, Sarah}, year={2015}, pages={878–881} } @article{anderson_heckman_vouk_wright_carter_burge_gannod_2015, title={CS/SE Instructors Can Improve Student Writing without Reducing Class Time Devoted to Technical Content: Experimental Results}, DOI={10.1109/icse.2015.178}, abstractNote={The Computer Science and Software Engineering (CS/SE) profession reports that new college graduates lack the communication skills needed for personal and organizational success. Many CS/SE faculty may omit communication instruction from their courses because they do not want to reduce technical content. We experimented in a software-engineering-intensive second-semester programming course with strategies for improving students' writing of black box test plans that included no instruction on writing the plans beyond the standard lecture on testing. The treatment version of the course used 1) a modified assignment that focused on the plan's readers, 2) a model plan students could consult online, and 3) a modified grading rubric that identified the readers' needs. Three external raters found that students in the treatment sections outperformed students in the control sections on writing for five of nine criteria on rubrics for evaluating the plans and on the raters' holistic impression of the students' technical and communication abilities from the perspectives of a manager and a tester.}, journal={2015 IEEE/ACM 37TH IEEE INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING, VOL 2}, author={Anderson, Paul V. and Heckman, Sarah and Vouk, Mladen and Wright, David and Carter, Michael and Burge, Janet E. 
and Gannod, Gerald C.}, year={2015}, pages={455–464} } @inproceedings{anderson_heckman_vouk_wright_carter_burge_gannod_2015, title={CS/SE instructors can improve student writing without reducing class time devoted to technical content: experimental results}, volume={2}, booktitle={2015 IEEE/ACM 37th IEEE International Conference on Software Engineering}, author={Anderson, Paul V and Heckman, Sarah and Vouk, Mladen and Wright, David and Carter, Michael and Burge, Janet E and Gannod, Gerald C}, year={2015}, pages={455–464} } @inproceedings{sherriff_heckman_2015, title={Empirical Research in CS Education}, booktitle={Proceedings of the 46th ACM Technical Symposium on Computer Science Education}, author={Sherriff, Mark and Heckman, Sarah}, year={2015}, pages={701–701} } @inproceedings{heckman_williams_2013, title={A comparative evaluation of static analysis actionable alert identification techniques}, booktitle={Proceedings of the 9th International Conference on Predictive Models in Software Engineering}, author={Heckman, Sarah and Williams, Laurie}, year={2013}, pages={1–10} } @book{carter_fornaro_heckman_heil_2012, title={Developing a learning progression that integrates communication in an undergraduate CS/SE curriculum}, institution={North Carolina State University. Dept. of Computer Science}, author={Carter, Michael and Fornaro, Robert and Heckman, Sarah Smith and Heil, Margaret}, year={2012} } @article{heckman_williams_2011, title={A systematic literature review of actionable alert identification techniques for automated static code analysis}, volume={53}, number={4}, journal={Information and Software Technology}, publisher={Elsevier}, author={Heckman, Sarah and Williams, Laurie}, year={2011}, pages={363–387} } @inproceedings{heckman_horton_sherriff_2011, title={Teaching second-level Java and software engineering with Android}, DOI={10.1109/cseet.2011.5876144}, abstractNote={Over the past two years, second-year Java and software engineering courses have been taught at the University of Virginia and North Carolina State University utilizing the Android OS platform. Instructors taught a variety of traditional second-year topics, including abstraction, design, requirements, and testing, utilizing a variety of Android-based mobile devices. Anecdotal responses from student surveys and evaluations from five course sessions indicate that teaching lower-level courses with more advanced and current technology, even with a steeper learning curve, is beneficial. In this tutorial proposal, we outline our plan for presenting a session that would help educators incorporate the Android OS into their curriculum and how to use the system even if mobile devices are not available.}, booktitle={2011 24th IEEE-CS Conference on Software Engineering Education and Training (CSEET)}, author={Heckman, Sarah and Horton, T. B. 
and Sherriff, M.}, year={2011}, pages={540–542} } @article{heckman_2010, title={Software testing (CS1 & CS2)}, volume={6}, journal={NC State University, August}, author={Heckman, Sarah}, year={2010} } @inproceedings{heckman_williams_2009, title={A model building process for identifying actionable static analysis alerts}, booktitle={2009 International Conference on Software Testing Verification and Validation}, author={Heckman, Sarah and Williams, Laurie}, year={2009}, pages={161–170} } @phdthesis{heckman_2009, title={A systematic model building process for predicting actionable static analysis alerts}, school={North Carolina State University}, author={Heckman, Sarah Smith}, year={2009} } @book{heckman_williams_2008, title={A measurement framework of alert characteristics for false positive mitigation models}, institution={North Carolina State University. Dept. of Computer Science}, author={Heckman, Sarah Smith and Williams, Laurie Ann}, year={2008} } @inproceedings{heckman_williams_2008, title={On establishing a benchmark for evaluating static analysis alert prioritization and classification techniques}, booktitle={Proceedings of the Second ACM-IEEE international symposium on Empirical software engineering and measurement}, author={Heckman, Sarah and Williams, Laurie}, year={2008}, pages={41–50} } @inproceedings{heckman_2007, title={Adaptive probabilistic model for ranking code-based static analysis alerts}, booktitle={29th International Conference on Software Engineering (ICSE'07 Companion)}, author={Heckman, Sarah Smith}, year={2007}, pages={89–90} } @article{heckman_2007, title={Adaptively ranking alerts generated from automated static analysis}, volume={14}, number={1}, journal={XRDS: Crossroads, The ACM Magazine for Students}, publisher={ACM New York, NY, USA}, author={Heckman, Sarah Smith}, year={2007}, pages={1–11} } @inproceedings{sherriff_heckman_lake_williams_2007, title={Identifying fault-prone files using static analysis alerts through singular value decomposition}, booktitle={Proceedings of the 2007 conference of the center for advanced studies on Collaborative research}, author={Sherriff, Mark and Heckman, Sarah Smith and Lake, Mike and Williams, Laurie}, year={2007}, pages={276–279} } @inproceedings{sherriff_heckman_lake_williams_2007, title={Using groupings of static analysis alerts to identify files likely to contain field failures}, booktitle={The 6th Joint Meeting on European software engineering conference and the ACM SIGSOFT symposium on the foundations of software engineering: companion papers}, author={Sherriff, Mark S and Heckman, Sarah Smith and Lake, J Michael and Williams, Laurie A}, year={2007}, pages={565–568} } @inproceedings{heckman_williams_2006, title={Automated adaptive ranking and filtering of static analysis alerts}, booktitle={Proc of the Fast abstract at the International Symposium on Software Reliability Engineering (ISSRE)}, author={Heckman, Sarah and Williams, Laurie}, year={2006} } @article{heckman_gehringer, title={Google Forms as an Enhanced Classroom Response System}, author={Heckman, Sarah and Gehringer, Edward F} }