@article{oliveira_gao_heckman_lynch_2024, title={Exploring Novice Programmer Testing Behavior: A First Step to Define Coding Struggle}, url={https://doi.org/10.1145/3626252.3630851}, DOI={10.1145/3626252.3630851}, abstractNote={To promote good coding practices, we need to understand what students do when they are on their own. In this research study, we explore students' testing behavior and response to persistent errors to better understand their coding patterns. We investigate how those patterns change when they struggle, and how help-seeking might influence their coding behaviors. We define struggle during coding as failing the same unit test case consecutively for more than four submission events, considering only unit test cases created by the instructors. To analyze the students' coding data, we use progress indicators, student test implementation indicators, and both student-generated and instructor-generated unit test results from each student submission event. In addition, we use office hours attendance records and amount of assignment-related posts created on the course forum. Results show that students tend not to follow test-driven development practices, even when explicitly directed to, and tend to create unit tests only to earn assignment credit rather than to guide their software development. Students also tend not to modify their own unit tests once they have earned the related credits, even when facing coding struggle; they tend to modify their unit tests only after they have been facing coding struggle for an extended number of submission events.}, journal={PROCEEDINGS OF THE 55TH ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION, SIGCSE 2024, VOL. 1}, author={Oliveira, Gabriel Silva and Gao, Zhikai and Heckman, Sarah and Lynch, Collin}, year={2024}, pages={1251–1257} } @article{reckinger_hummel_heckman_2024, title={Traditional vs. Flexible Modalities in a Data Structures Class}, url={https://doi.org/10.1145/3626252.3630952}, DOI={10.1145/3626252.3630952}, abstractNote={This experience report presents results from a quasi-experiment comparing course performance and student-reported survey constructs between two groups of students. One group took a Data Structures course with traditional, in-person modality. The second group took the same course with flexible, online modality. The work was motivated by the rapid adjustments computer science instructors made due to remote learning during the COVID-19 pandemic. In a response to these forced changes, this study was set up to investigate the differences between pre- and post- pandemic modalities. There are 212 students in the study, which took place in Fall 2021 at an R1, minority serving institution in the Midwestern United States. The study found that students in both groups performed similarly on common course components like projects, labs, homework, and a final Data Structures assessment. There were not significant differences in their self-reported ease of learning, enjoyment, belongingness, attitude, mindset, and self-efficacy. However, when taking into consideration gender, we found that women's performance was lower than men's in the traditional modality course. Women's performance in the flexible modality course was on par with men. Lastly, we present some feedback from students relating to assessments and modality.}, journal={PROCEEDINGS OF THE 55TH ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION, SIGCSE 2024, VOL. 
1}, author={Reckinger, Shanon and Hummel, Joe and Heckman, Sarah}, year={2024}, pages={1112–1118} } @inproceedings{mcgill_heckman_liut_sanusi_szabo_2024, title={Unlocking Excellence in Educational Research: Guidelines for High-Quality Research that Promotes Learning for All}, url={https://doi.org/10.1145/3626253.3633402}, DOI={10.1145/3626253.3633402}, abstractNote={While there are multiple standards bodies that define characteristics of high-quality, there are limited guidelines on conducting equity-enabling research, particularly in the context of high quality and in computing education. As part of an ACM ITiCSE Working Group in 2023, we engaged in a concept analysis and structured literature review to identify high-impact practices for conducting both high-quality and equity-enabling education research. As a result of this work, we produced a set of guidelines across each major phase of research that integrates characteristics of high-quality education research with those that are necessary for producing research that is designed to honor and meet the needs of various subgroups of learners. Special emphasis is given to the role that the researcher plays in shaping the research based upon how the researcher's lived experiences, perspectives, and training influences their work. During this special session, we will review each set of guidelines and engage attendees in reflection and discussion of them and how they can use the guidelines to enhance their education research.}, author={McGill, Monica M. and Heckman, Sarah and Liut, Michael and Sanusi, Ismaila Temitayo and Szabo, Claudia}, year={2024}, month={Mar} } @inproceedings{gao_gaweda_lynch_heckman_babalola_oliveira_2024, title={Using Survival Analysis to Model Students' Patience in Online Office Hour Queues}, url={https://doi.org/10.1145/3626253.3635517}, DOI={10.1145/3626253.3635517}, author={Gao, Zhikai and Gaweda, Adam and Lynch, Collin and Heckman, Sarah and Babalola, Damilola and Oliveira, Gabriel Silva}, year={2024}, month={Mar} } @inproceedings{gransbury_mcgill_thompson_heckman_rosato_delyser_2023, title={A Framework of Factors that Influence Academic Achievement in Computer Science within Capacity, Access, Participation and Experience}, url={https://doi.org/10.1145/3568812.3603481}, DOI={10.1145/3568812.3603481}, abstractNote={Motivation. There are countless factors that contribute to academic achievement [3, 6, 7, 8]. For K-12 computer science (CS) education, these building blocks of student success are broadly captured in the four components of the CAPE framework, which considers the capacity for, access to, participation in, and experiences of students learning CS through a lens of diversity, equity, and inclusion [4]. However, there is currently little published research that identifies factors within each component that influence the education ecosystem that students rely on to participate and have positive learning experiences in CS [10]. Research question. Our research question for this study was: What are factors impacting academic achievement among K-12 CS students that comprise each component of CAPE? Methodology. We conducted a systematic mapping review [1] of relevant literature using a deductive coding technique. We first created an a priori codebook based on previous research findings that identified factors that contribute to academic achievement [3, 7, 8, 9] including the CS Teachers Association K-12 CS Standards [2]. 
We grouped these factors into capacity (69 factors), access (12 factors), participation (4 factors), or experience (84 factors). We then conducted the systematic mapping review of K-12 CS education research articles (2019-2021) (n = 196) from publicly available data from the K-12 CS Education Research Resource Center [9]. Adapting the Framework Method for coding [5], two researchers carefully read and examined each article, identifying all the factors studied in each. The researchers noted discrepancies, which were then reported to the larger team to review and resolve. Through this mapping, we found additional factors (n=129) that have been studied by researchers, some of which are specific to CS education, and added these to our codebook. Results. As a result of this analysis, we created a comprehensive codebook containing over 300 factors that contribute to the educational ecosystem for CS. Given the large number of factors, we used similar method groupings by Hattie and Yates to group factors within each CAPE component into subcomponents, subcomponents into categories, and categories into subcategories [6]. These groupings provide researchers with an easy way to understand and search for the various factors within a component. Figure 1 shows the subcomponents and categories for the Capacity component. Each of these factors can, in a small or large way, impact academic achievement among students. Implications. The expanded CAPE framework is now publicly available and can be used to inform researchers and practitioners what each CAPE component comprises. This codebook is accompanied by descriptions of each factor and, in some cases, examples. Not only does it surface the many factors to be considered when designing and delivering CS education to K-12 students, it also provides a solid framework for studying various factors and how they can be correlated. In future planned research, we intend to provide the ability to quickly find articles providing direct or indirect impact evidence, which can be used to inform decision making and practice through meta-synthesis and meta-analysis.}, author={Gransbury, Isabella and McGill, Monica M. and Thompson, Angelica and Heckman, Sarah and Rosato, Jennifer and Delyser, Leigh Ann}, year={2023}, month={Aug} } @article{bai_sthapit_heckman_price_stolee_2023, title={An Experience Report on Introducing Explicit Strategies into Testing Checklists for Advanced Beginners}, url={https://doi.org/10.1145/3587102.3588781}, DOI={10.1145/3587102.3588781}, abstractNote={Software testing is a critical skill for computing students, but learning and practicing testing can be challenging, particularly for beginners. A recent study suggests that a lightweight testing checklist that contains testing strategies and tutorial information could assist students in writing quality tests. However, students expressed a desire for more support in knowing how to test the code/scenario. Moreover, the potential costs and benefits of the testing checklist are not yet examined in a classroom setting. To that end, we improved the checklist by integrating explicit testing strategies to it (ETS Checklist), which provide step-by-step guidance on how to transfer semantic information from instructions to the possible testing scenarios. In this paper, we report our experiences in designing explicit strategies in unit testing, as well as adapting the ETS Checklist as optional tool support in a CS1.5 course. 
With the quantitative and qualitative analysis of the survey responses and lab assignment submissions generated by students, we discuss students' engagement with the ETS Checklists. Our results suggest that students who used the checklist intervention had significantly higher quality in their student-authored test code, in terms of code coverage, compared to those who did not, especially for assignments earlier in the course. We also observed students' unawareness of their need for help in writing high-quality tests.}, journal={PROCEEDINGS OF THE 2023 CONFERENCE ON INNOVATION AND TECHNOLOGY IN COMPUTER SCIENCE EDUCATION, ITICSE 2023, VOL 1}, author={Bai, Gina R. and Sthapit, Sandeep and Heckman, Sarah and Price, Thomas W. and Stolee, Kathryn T.}, year={2023}, pages={194–200} } @article{gitinabard_gao_heckman_barnes_lynch_others_2023, title={Analysis of Student Pair Teamwork Using GitHub Activities}, volume={15}, number={1}, journal={Journal of Educational Data Mining}, author={Gitinabard, Niki and Gao, Zhikai and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin F and others}, year={2023}, pages={32–62} } @article{zahn_gransbury_heckman_battestilli_2023, title={Assessment of Self-Identified Learning Struggles in CS2 Programming Assignments}, url={https://doi.org/10.1145/3587102.3588786}, DOI={10.1145/3587102.3588786}, abstractNote={Students can have widely varying experiences while working on CS2 coding projects. Challenging experiences can lead to lower motivation and less success in completing these assignments. In this paper, we identify the common struggles CS2 students face while working on course projects and examine whether or not there is evidence of improvement in these areas of struggle between projects. While previous work has been conducted on understanding the importance of self-regulated learning to student success, it has not been fully investigated in the scope of CS2 coursework. We share our observations on investigating student struggles while working on coding projects through their self-reported response to a project reflection form. We apply emergent coding to identify student struggles at three points during the course and compare them against student actions in the course, such as project start times and office hours participation, to identify if students were overcoming these struggles.
Through our coding and analysis we have found that while a majority of students encounter struggles with time management and debugging of failing tests, students tend to emphasize wanting to improve their time management skills in future coding assignments.}, journal={PROCEEDINGS OF THE 2023 CONFERENCE ON INNOVATION AND TECHNOLOGY IN COMPUTER SCIENCE EDUCATION, ITICSE 2023, VOL 1}, author={Zahn, Matthew and Gransbury, Isabella and Heckman, Sarah and Battestilli, Lina}, year={2023}, pages={264–270} } @article{mcgill_heckman_chytas_diaz_liut_kazakova_sanusi_shah_szabo_2023, title={Building Recommendations for Conducting Equity-Focused, High Quality K-12 Computer Science Education Research}, url={https://doi.org/10.1145/3587103.3594207}, DOI={10.1145/3587103.3594207}, abstractNote={To investigate and identify promising practices in equitable K-12 computer science (CS) education, the capacity for education researchers to conduct this research must be rapidly built globally. Simultaneously, concerns have arisen over the last few years about the quality of research that is being conducted and the lack of equity-focused research. In this working group, we will tackle the research question: In what ways can previous research standards inform high-quality, equity-focused K-12 CS education research? We will use existing research and various standards bodies (e.g., European Educational Research Association, Australian Education Research Organisation, CONSORT, American Psychological Association) to synthesize key features in the context of equity-focused K-12 CS education research. We will then vet these attributes with experts who can provide feedback and refine our recommendations and guidelines. Our working group will select the experts using a strata reflecting a diversity of backgrounds and experiences to support our focus on student populations that have been historically marginalized in computing (e.g., low-income students, rural students, girls, students with disabilities). Our recommendations will directly impact future equitable computing education research by providing guidance on conducting high-quality research such that the findings can be aggregated and impact future policy with evidence-based results. While we recognize that different countries and regions may yield differing answers to this question, our recommendations will be robust enough that researchers in each country or region may choose to use those most appropriate to their context.}, journal={PROCEEDINGS OF THE 2023 CONFERENCE ON INNOVATION AND TECHNOLOGY IN COMPUTER SCIENCE EDUCATION, ITICSE 2023, VOL. 2}, author={McGill, Monica M. and Heckman, Sarah and Chytas, Christos and Diaz, Lien and Liut, Michael and Kazakova, Vera and Sanusi, Ismaila Temitayo and Shah, Selina Marianna and Szabo, Claudia}, year={2023}, pages={565–566} } @article{mcgill_thompson_gransbury_heckman_rosato_delyser_2023, title={Building upon the CAPE Framework for Broader Understanding of Capacity in K-12 CS Education}, url={https://doi.org/10.1145/3545945.3569799}, DOI={10.1145/3545945.3569799}, abstractNote={Research Problem. The CAPE Framework has been used in multiple studies to situate capacity-building efforts within schools to offer equitable student access to and participation in K-12 computer science (CS) education. CAPE defines four major components of capacity, access, participation and experience. However, to define what each of the CAPE components can entail, well-defined subcomponents are needed. Research Question. 
Our research questions for this study were: What are the possible subcomponents for Capacity in the CAPE framework? and How feasible is it to use the newly defined subcomponents in a gap analysis study? Methodology. We conducted a qualitative content analysis by creating a codebook from an existing data framework and literature review. We reframed earlier findings on factors that influence student learning and academic achievement into the CAPE. Findings. We vetted an expanded framework that includes eight Capacity subcomponents, a third (categories) layer and a fourth (subcategories) layer that can be used to disaggregate the many elements that comprise Capacity. For our trial analysis of 196 articles, we added several codes at the category and subcategory level, but found no gaps in the codes for our a priori defined subcomponents. Implications. The extended Capacity framework can be used by others to inform its usage and develop a consensus of what is included within each subcomponent for Capacity, develop instrumentation and protocols for exploring Capacity at a more granular level, conduct scoping and literature reviews, and understand how various variables play a part in the CS educational ecosystem.}, journal={PROCEEDINGS OF THE 54TH ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION, VOL 1, SIGCSE 2023}, author={McGill, Monica M. and Thompson, Angelica and Gransbury, Isabella and Heckman, Sarah and Rosato, Jennifer and DeLyser, Leigh Ann}, year={2023}, pages={577–582} } @article{mcgill_heckman_chytas_liut_kazakova_sanusi_shah_szabo_2023, title={Conducting Sound, Equity-Enabling Computing Education Research}, url={https://doi.org/10.1145/3623762.3633495}, DOI={10.1145/3623762.3633495}, abstractNote={Problem. To investigate and identify promising practices in equitable K-12 and tertiary computer science (CS) education, the capacity for education researchers to conduct this research must be rapidly built globally. Simultaneously, concerns have arisen over the last few years about the quality of research that is being conducted and the lack of research that supports teaching all students computing. Research Question. Our research question for our study was: In what ways can existing research standards and practices inform methodologically sound, equity-enabling computing education research? Methodology. We conducted a concept analysis using existing research and various standards (e.g. European Educational Research Association, Australian Education Research Organisation, American Psychological Association). We then synthesised key features in the context of equity-focused K-12 computing education research. Findings. We present a set of guidelines for general research design that takes into account best practices across the standards that are infused with equity-enabling research practices. Implications. Our guidelines will directly impact future equitable computing education research by providing guidance on conducting high-quality research such that the findings can be aggregated and impact future policy with evidence-based results. Because we have crafted these guidelines to be broadly applicable across a variety of settings, we believe that they will be useful to researchers operating in a variety of contexts.}, journal={PROCEEDINGS OF THE 2023 WORKING GROUP REPORTS ON INNOVATION AND TECHNOLOGY IN COMPUTER SCIENCE EDUCATION, ITICSE-WGR 2023}, author={McGill, Monica M. 
and Heckman, Sarah and Chytas, Christos and Liut, Michael and Kazakova, Vera and Sanusi, Ismaila Temitayo and Shah, Selina Marianna and Szabo, Claudia}, year={2023} } @article{presler-marshall_heckman_stolee_2023, title={Improving Grading Outcomes in Software Engineering Projects Through Automated Contributions Summaries}, ISSN={["2832-756X"]}, DOI={10.1109/ICSE-SEET58685.2023.00030}, abstractNote={Teaming is a key aspect of most professional software engineering positions, and consequently, team-based learning (TBL) features heavily in many undergraduate computer science (CS) and software engineering programs. However, while TBL offers many pedagogical benefits, it is not without challenges. One such challenge is assessment, as the course teaching staff must be able to accurately identify individual students’ contributions to both encourage and reward participation. In this paper, we study improvements to grading practises in the context of a CS1.5 introductory software engineering course, where assessing individual students’ contributions to weekly lab assignments is done manually by teaching assistants (TAs). We explore the impact of presenting TAs with automated summaries of individual student contributions to their team’s GitHub repository. To do so, we propose a novel algorithm, and implement a tool based off of it, AutoVCS. We measure the impact on grading metrics in terms of grading speed, grading consistency, and TA satisfaction. We evaluate our algorithm, as implemented in AutoVCS, in a controlled experimental study on Java-based lab assignments from a recent offering of NC State University’s CS1.5 course. We find our automated summaries help TAs grade more consistently and provides students with more actionable feedback. Although TAs grade no faster using automated summaries, they nonetheless strongly prefer grading with the support of them than without. We conclude with recommendations for future work to explore improving consistency in contribution grading for student software engineering teams.}, journal={2023 IEEE/ACM 45TH INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING-SOFTWARE ENGINEERING EDUCATION AND TRAINING, ICSE-SEET}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2023}, pages={259–270} } @inproceedings{gao_lynch_heckman_2023, title={Too long to wait and not much to do: Modeling student behaviors while waiting for help in online office hours.}, booktitle={Proceedings of the 7th Educational Data Mining in Computer Science Education (CSEDM) Workshop}, author={Gao, Zhikai and Lynch, Collin and Heckman, Sarah}, year={2023} } @article{heckman_carver_sherriff_al-zubidy_2022, title={A Systematic Literature Review of Empiricism and Norms of Reporting in Computing Education Research Literature}, url={https://doi.org/10.1145/3470652}, DOI={10.1145/3470652}, abstractNote={ Context. Computing Education Research (CER) is critical to help the computing education community and policy makers support the increasing population of students who need to learn computing skills for future careers. For a community to systematically advance knowledge about a topic, the members must be able to understand published work thoroughly enough to perform replications, conduct meta-analyses, and build theories. There is a need to understand whether published research allows the CER community to systematically advance knowledge and build theories. }, journal={ACM Transactions on Computing Education}, author={Heckman, Sarah and Carver, Jeffrey C. 
and Sherriff, Mark and Al-Zubidy, Ahmed}, volume={22}, number={1}, year={2022}, month={Mar}, pages={1–46} } @inproceedings{battestilli_zahn_heckman_2022, title={Academic Help Seeking Patterns in Introductory Computer Science Courses}, booktitle={2022 ASEE Annual Conference & Exposition}, author={Battestilli, Lina and Zahn, Matthew and Heckman, Sarah}, year={2022} } @inproceedings{heckman_minnes_2022, title={Academic Middle Management: Undergraduate Leadership in Computing Programs}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education V. 2}, author={Heckman, Sarah and Minnes, Mia}, year={2022}, pages={1184–1184} } @inproceedings{gao_erickson_xu_lynch_heckman_barnes_2022, title={Admitting you have a problem is the first step: Modeling when and why students seek help in programming assignments}, booktitle={Proceedings of the 15th International Conference on Educational Data Mining}, editor={Mitrovic, A. and Bosch, N.}, publisher={International Educational Data Mining Society}, address={Durham, United Kingdom}, author={Gao, Zhikai and Erickson, Bradley and Xu, Yiqiao and Lynch, Collin and Heckman, Sarah and Barnes, Tiffany}, year={2022}, pages={508–514} } @article{erickson_heckman_lynch_2022, title={Characterizing Student Development Progress: Validating Student Adherence to Project Milestones}, DOI={10.1145/3478431.3499373}, abstractNote={As enrollment in CS programs have risen, it has become increasingly difficult for teaching staff to provide timely and detailed guidance on student projects. To address this, instructors use automated assessment tools to evaluate students' code and processes as they work. Even with automation, understanding students' progress, and more importantly, if students are making the 'right' progress toward the solution is challenging at scale. To help students manage their time and learn good software engineering processes, instructors may create intermediate deadlines, or milestones, to support progress. However, student's adherence to these processes is opaque and may hinder student success and instructional support. Better understanding of how students follow process guidance in practice is needed to identify the right assignment structures to support development of high-quality process skills. We use data collected from an automated assessment tool, to calculate a set of 15 progress indicators to investigate which types of progress are being made during four stages of two projects in a CS2 course. These stages are split up by milestones to help guide student activities. We show how looking at which progress indicators are triggered significantly more or less during each stage validates whether students are adhering to the goals of each milestone.
We also find students trigger some progress indicators earlier on the second project suggesting improving processes over time.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Erickson, Bradley and Heckman, Sarah and Lynch, Collin F.}, year={2022}, pages={15–21} } @article{gitinabard_heckman_barnes_lynch_2022, title={Designing a Dashboard for Student Teamwork Analysis}, DOI={10.1145/3478431.3499377}, abstractNote={Classroom dashboards are designed to help instructors effectively orchestrate classrooms by providing summary statistics, activity tracking, and other information. Existing dashboards are generally specific to an LMS or platform and they generally summarize individual work, not group behaviors. However, CS courses typically involve constellations of tools and mix on- and offline collaboration. Thus, cross-platform monitoring of individuals and teams is important to develop a full picture of the class. In this work, we describe our work on Concert, a data integration platform that collects data about student activities from several sources such as Piazza, My Digital Hand, and GitHub and uses it to support classroom monitoring through analysis and visualizations. We discuss team visualizations that we have developed to support effective group management and to help instructors identify teams in need of intervention.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Gitinabard, Niki and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin}, year={2022}, pages={446–452} } @inproceedings{mannekote_celepkolu_galdo_boyer_israel_heckman_stephens-martinez_2022, title={Don't Just Paste Your Stacktrace: Shaping Discussion Forums in Introductory CS Courses}, booktitle={Proceedings of the 53rd ACM Technical Symposium on Computer Science Education V. 2}, author={Mannekote, Amogh and Celepkolu, Mehmet and Galdo, Aisha Chung and Boyer, Kristy Elizabeth and Israel, Maya and Heckman, Sarah and Stephens-Martinez, Kristin}, year={2022}, pages={1164–1164} } @article{presler-marshall_heckman_stolee_2022, title={Identifying Struggling Teams in Software Engineering Courses Through Weekly Surveys}, DOI={10.1145/3478431.3499367}, abstractNote={Teaming is increasingly a core aspect of professional software engineering and most undergraduate computer science curricula. At NC State University, we teach communication and project-management skills explicitly through a junior-level software engineering course. However, some students may have a dysfunctional team experience that imperils their ability to learn these skills. Identifying these teams during a team project is important so the teaching staff can intervene early and hopefully alleviate the issues.
We propose a weekly reflection survey to help the course teaching staff proactively identify teams that may not be on track to learn the course outcomes. The questions on the survey focus on team communication and collaboration over the previous week. We evaluate our survey on two semesters of the undergraduate software engineering course by comparing teams with poor end-of-project grades or peer evaluations against teams flagged on a weekly basis through the surveys. We find that the survey can identify most teams that later struggled on the project, typically by the half-way mark of the project, and thus may provide instructors with an actionable early-warning about struggling teams. Furthermore, a majority of students (64.4%) found the survey to be a helpful tool for keeping their team on track. Finally, we discuss future work for improving the survey and engaging with student teams.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2022}, pages={126–132} } @inproceedings{zahn_heckman_2022, title={Observations on Student Help-Seeking Behaviors in Introductory Computer Science Courses}, url={https://doi.org/10.1145/3545947.3576325}, DOI={10.1145/3545947.3576325}, abstractNote={The help-seeking interactions faculty encounter will vary depending upon the course structure and the students enrolled. While the course structure tends to remain the same, the students enrolled change each semester, presenting a new set of students who seek help in many different ways. We share our observations in investigating student behavior when using course resources, including office hours and online discussion forums, in two introductory computer science courses. Our goal is to explore differences in help-seeking behavior to construct Student "Help-Seeking" Personas. Preliminary analysis has shown that, for these two introductory CS courses, there are no well-defined personas that emerge from the grouping of help-seeking behaviors. The demographic of students exhibiting various help-seeking behaviors tends to be a near-proportionate subset of the overall course demographic. Thus, no distinct personas emerge from the students' help-seeking behaviors in introductory CS courses that faculty can utilize to better understand their students.}, booktitle={Proceedings of the 54th ACM Technical Symposium on Computer Science Education V. 2}, author={Zahn, Matthew and Heckman, Sarah}, year={2023}, month={Mar}, pages={1380–1380} } @article{carver_heckman_sherriff_2022, title={Training Computing Educators to Become Computing Education Researchers}, DOI={10.1145/3478431.3499297}, abstractNote={The computing education community endeavors to consistently move forward, improving the educational experience of our students. As new innovations in computing education practice are learned and shared, however, these papers may not exhibit the desired qualities that move simple experience reports to true Scholarship of Teaching and Learning (SoTL).
We report on our six years of experience in running professional development for computing educators in empirical research methods for social and behavioral studies in the classroom. Our goal is to have a direct impact on instructors who are in the beginning stages of transitioning their educational innovations from anecdotal to empirical results that can be replicated by instructors at other institutions. To achieve this, we created a year-long mentoring experience, beginning with a multi-day workshop on empirical research methods during the summer, followed by regular mentoring sessions with participants, and culminating in a follow-up session at the following year's SIGCSE Technical Symposium. From survey results and as evidenced by eventual research results and publications from participants, we believe that our method of structuring empirical research professional development was successful and could be a model for similar programs in other areas.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Carver, Jeffrey C. and Heckman, Sarah and Sherriff, Mark}, year={2022}, pages={724–730} } @inproceedings{presler-marshall_heckman_stolee_2022, title={What Makes Team[s] Work? A Study of Team Characteristics in Software Engineering Projects}, booktitle={Proceedings of the 2022 ACM Conference on International Computing Education Research-Volume 1}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T}, year={2022}, pages={177–188} } @article{gao_heckman_lynch_2022, title={Who Uses Office Hours? A Comparison of In-Person and Virtual Office Hours Utilization}, DOI={10.1145/3478431.3499334}, abstractNote={In Computer Science (CS) education, instructors use office hours for one-on-one help-seeking. Prior work has shown that traditional in-person office hours may be underutilized. In response many instructors are adding or transitioning to virtual office hours. Our research focuses on comparing in-person and online office hours to investigate differences between performance, interaction time, and the characteristics of the students who utilize in-person and virtual office hours. We analyze a rich dataset covering two semesters of a CS2 course which used in-person office hours in Fall 2019 and virtual office hours in Fall 2020. Our data covers students' use of office hours, the nature of their questions, and the time spent receiving help as well as demographic and attitude data. Our results show no relationship between student's attendance in office hours and class performance. However we found that female students attended office hours more frequently, as did students with a fixed mindset in computing, and those with weaker skills in transferring theory to practice. We also found that students with low confidence in or low enjoyment toward CS were more active in virtual office hours.
Finally, we observed a significant correlation between students attending virtual office hours and an increased interest in CS study; while students attending in-person office hours tend to show an increase in their growth mindset.}, journal={PROCEEDINGS OF THE 53RD ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION (SIGCSE 2022), VOL 1}, author={Gao, Zhikai and Heckman, Sarah and Lynch, Collin}, year={2022}, pages={300–306} } @article{gao_erickson_xu_lynch_heckman_barnes_others_2022, title={You asked, now what? Modeling Students' Help-Seeking and Coding actions from Request to Resolution}, volume={14}, number={3}, journal={Journal of Educational Data Mining}, author={Gao, Zhikai and Erickson, Bradley and Xu, Yiqiao and Lynch, Collin and Heckman, Sarah and Barnes, Tiffany and others}, year={2022}, pages={109–131} } @article{gao_lynch_heckman_barnes_2021, title={Automatically Classifying Student Help Requests: A Multi-Year Analysis.}, journal={International Educational Data Mining Society}, publisher={International Educational Data Mining Society}, author={Gao, Zhikai and Lynch, Collin and Heckman, Sarah and Barnes, Tiffany}, year={2021} } @inproceedings{basu_heckman_maher_2021, title={Online Vs Face-to-face Web-development Course: Course Strategies, Learning, and Engagement}, booktitle={Proceedings of the 52nd ACM Technical Symposium on Computer Science Education}, author={Basu, Debarati and Heckman, Sarah and Maher, Mary Lou}, year={2021}, pages={1191–1197} } @article{akintunde_limke_barnes_heckman_lynch_2021, title={PEDI - Piazza Explorer Dashboard for Intervention}, ISSN={["1943-6092"]}, DOI={10.1109/VL/HCC51201.2021.9576443}, abstractNote={Analytics about how students navigate online learning tools throughout the duration of an assignment is scarce. Knowledge about how students use online tools before a course's end could positively impact students' learning outcomes. We introduce PEDI (Piazza Explorer Dashboard for Intervention), a tool which analyzes and presents visualizations of forum activity on Piazza, a question and answer forum, to instructors. We outline the design principles and data-informed recommendations used to design PEDI. Our prior research revealed two critical periods in students' forum engagement over the duration of an assignment. Early engagement in the first half of an assignment duration positively correlates with class average performance. Whereas, extremely high engagement toward the deadline predicted lower class average performance. PEDI uses these findings to detect and flag troubling engagement levels and informs instructors through clear visualizations to promote data-informed interventions.
By providing insights to instructors, PEDI may improve class performance and pave the way for a new generation of online tools.}, journal={2021 IEEE SYMPOSIUM ON VISUAL LANGUAGES AND HUMAN-CENTRIC COMPUTING (VL/HCC 2021)}, author={Akintunde, Ruth Okoilu and Limke, Ally and Barnes, Tiffany and Heckman, Sarah and Lynch, Collin}, year={2021} } @article{presler-marshall_heckman_stolee_2021, title={SQLRepair: Identifying and Repairing Mistakes in Student-Authored SQL Queries}, DOI={10.1109/ICSE-SEET52601.2021.00030}, abstractNote={Computer science educators seek to understand the types of mistakes that students make when learning a new (programming) language so that they can help students avoid those mistakes in the future. While educators know what mistakes students regularly make in languages such as C and Python, students struggle with SQL and regularly make mistakes when working with it. We present an analysis of mistakes that students made when first working with SQL, classify the types of errors introduced, and provide suggestions on how to avoid them going forward. In addition, we present an automated tool, SQLRepair, that is capable of repairing errors introduced by undergraduate programmers when writing SQL queries. Our results show that students find repairs produced by our tool comparable in understandability to queries written by themselves or by other students, suggesting that SQL repair tools may be useful in an educational context. We also provide to the community a benchmark of SQL queries written by the students in our study that we used for evaluation of SQLRepair.}, journal={2021 IEEE/ACM 43RD INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING: JOINT TRACK ON SOFTWARE ENGINEERING EDUCATION AND TRAINING (ICSE-JSEET 2021)}, author={Presler-Marshall, Kai and Heckman, Sarah and Stolee, Kathryn T.}, year={2021}, pages={199–210} } @article{heckman_schmidt_king_2020, title={Integrating Testing Throughout the CS Curriculum}, ISSN={["2159-4848"]}, DOI={10.1109/ICSTW50294.2020.00079}, abstractNote={Software testing is a critical component of any software development lifecycle, but becoming an experienced software tester requires understanding many strategies for writing high-quality test cases and a significant amount of practice. Situated learning theory suggests that students should be exposed to things they would see in a professional workplace. In terms of software testing, students should be exposed to real-world software testing practices in a variety of contexts, from the simplest of programs to the very complex. The goal of this paper is to share our experience integrating software testing into our undergraduate curriculum at North Carolina State University.
In this paper, we discuss how software testing is taught in our CS1 – Introductory Programming, CS2 – Software Development Fundamentals, and several other courses beyond CS2. Over the past 10 years of teaching software testing in introductory programming courses, we discuss lessons learned and highlight open concerns for future research.}, journal={2020 IEEE 13TH INTERNATIONAL CONFERENCE ON SOFTWARE TESTING, VERIFICATION AND VALIDATION WORKSHOPS (ICSTW)}, author={Heckman, Sarah and Schmidt, Jessica Young and King, Jason}, year={2020}, pages={441–444} } @article{gitinabard_okoilu_xu_heckman_barnes_lynch_2020, title={Student Teamwork on Programming Projects: What can GitHub logs show us?}, journal={arXiv preprint arXiv:2008.11262}, author={Gitinabard, Niki and Okoilu, Ruth and Xu, Yiqiao and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin}, year={2020} } @article{heckman_fain_pérez-quiñones_2019, title={Building and expanding a successful undergraduate research program}, volume={35}, number={4}, journal={Journal of Computing Sciences in Colleges}, publisher={Consortium for Computing Sciences in Colleges}, author={Heckman, Sarah and Fain, Brandon and Pérez-Quiñones, Manuel}, year={2019}, pages={18–19} } @article{gitinabard_xu_heckman_barnes_lynch_2019, title={How Widely Can Prediction Models Be Generalized? Performance Prediction in Blended Courses}, volume={12}, ISSN={["1939-1382"]}, url={https://doi.org/10.1109/TLT.2019.2911832}, DOI={10.1109/TLT.2019.2911832}, abstractNote={Blended courses that mix in-person instruction with online platforms are increasingly common in secondary education. These platforms record a rich amount of data on students’ study habits and social interactions. Prior research has shown that these metrics are correlated with students performance in face-to-face classes. However, predictive models for blended courses are still limited and have not yet succeeded at early prediction or cross-class predictions, even for repeated offerings of the same course. In this paper, we use data from two offerings of two different undergraduate courses to train and evaluate predictive models of student performance based on persistent student characteristics including study habits and social interactions. We analyze the performance of these models on the same offering, on different offerings of the same course, and across courses to see how well they generalize. We also evaluate the models on different segments of the courses to determine how early reliable predictions can be made. This paper tells us in part how much data is required to make robust predictions and how cross-class data may be used, or not, to boost model performance. The results of this study will help us better understand how similar the study habits, social activities, and the teamwork styles are across semesters for students in each performance category.
These trained models also provide an avenue to improve our existing support platforms to better support struggling students early in the semester with the goal of providing timely intervention.}, number={2}, journal={IEEE TRANSACTIONS ON LEARNING TECHNOLOGIES}, publisher={Institute of Electrical and Electronics Engineers (IEEE)}, author={Gitinabard, Niki and Xu, Yiqiao and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin F.}, year={2019}, pages={184–197} } @article{hawthorne_pérez-quiñones_heckman_zhang_2019, title={SIGCSE technical symposium 2019 report}, volume={51}, number={2}, journal={ACM SIGCSE Bulletin}, publisher={ACM}, author={Hawthorne, Elizabeth K and Pérez-Quiñones, Manuel A and Heckman, Sarah and Zhang, Jian}, year={2019}, pages={2–4} } @article{zhang_sherriff_heckman_cutter_monge_2019, title={SIGCSE technical symposium 2020 call for submissions}, volume={51}, number={3}, journal={ACM SIGCSE Bulletin}, publisher={ACM}, author={Zhang, Jian and Sherriff, Mark and Heckman, Sarah and Cutter, Pam and Monge, Alvaro}, year={2019}, pages={2–3} } @inproceedings{presler-marshall_horton_heckman_stolee_2019, title={Wait, Wait. No, Tell Me. Analyzing Selenium Configuration Effects on Test Flakiness}, booktitle={2019 IEEE/ACM 14th International Workshop on Automation of Software Test (AST)}, author={Presler-Marshall, Kai and Horton, Eric and Heckman, Sarah and Stolee, Kathryn}, year={2019}, pages={7–13} } @article{gitinabard_heckman_barnes_lynch_2019, title={What will you do next? A sequence analysis on the student transitions between online platforms in blended courses}, journal={arXiv preprint arXiv:1905.00928}, author={Gitinabard, Niki and Heckman, Sarah and Barnes, Tiffany and Lynch, Collin F}, year={2019} } @article{heckman_stolee_parnin_2018, title={10+ Years of Teaching Software Engineering with iTrust: the Good, the Bad, and the Ugly}, ISSN={["0270-5257"]}, DOI={10.1145/3183377.3183393}, abstractNote={This paper presents an experience report with a junior-level software engineering course at North Carolina State University. We provide an overview of the course structure and the course project, iTrust, that has been developed by students over 25 semesters. We summarize reflections from faculty, teaching assistants, and students (through course evaluations). From our lessons learned, we present our course improvements as we prepare for the next ten years of software engineering courses. Our main lessons learned are 1) course technologies have a lifespan and require periodic updating to balance student learning and working with a legacy system; 2) teaching assistant longevity and support is critical to course success; and 3) the value of working with a large, legacy system in a semester long course is supported by faculty, teaching assistants, and eventually students.}, journal={2018 IEEE/ACM 40TH INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING: SOFTWARE ENGINEERING EDUCATION AND TRAINING (ICSE-SEET)}, author={Heckman, Sarah and Stolee, Kathryn T.
and Parnin, Christopher}, year={2018}, pages={1–4} } @article{sherriff_heckman_2018, title={Capstones and large projects in computing education}, volume={18}, number={2}, journal={ACM Transactions on Computing Education (TOCE)}, publisher={ACM New York, NY, USA}, author={Sherriff, Mark and Heckman, Sarah}, year={2018}, pages={1–4} } @article{carver_heckman_sherriff_2018, title={Designing Empirical Education Research Studies (DEERS): Creating an Answerable Research Question}, DOI={10.1145/3159450.3162350}, abstractNote={One of the most important, and difficult, aspects of starting an education research project is identifying an interesting, answerable, repeatable, measurable, and appropriately scoped research question. The lack of a valid research question reduces the potential impact of the work and could result in wasted effort. The goal of this workshop is to help educational researchers get off on the right foot by defining such a research question. This workshop is part of the larger Designing Empirical Education Research Studies (DEERS) project, which consists of an ongoing series of workshops in which researcher cohorts work with experienced empirical researchers to design, implement, evaluate, and publish empirical work in computer science education. In addition to instruction on the various aspects of good research questions, DEERS alumni will join us to mentor attendees in development of their own research questions in small group breakout sessions. At the end of the workshop, attendees will leave with a valid research question that can then be the start for designing a research study. Attendees will also receive information on how to apply to attend the full summer workshop, where they can fully flesh out the empirical study design, and join a DEERS research cohort. More information about DEERS can be found at http://empiricalcsed.org.}, journal={SIGCSE'18: PROCEEDINGS OF THE 49TH ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION}, author={Carver, Jeffrey C. and Heckman, Sarah and Sherriff, Mark}, year={2018}, pages={1051–1051} } @article{heckman_king_2018, place={New York, NY, USA}, title={Developing Software Engineering Skills using Real Tools for Automated Grading}, DOI={10.1145/3159450.3159595}, abstractNote={Situated learning theory supports engaging students with materials and resources that reflect professional standards and best practices. Starting with our introductory courses, we incorporate situated learning to support student engagement in software engineering practices and processes through the use of industrial strength open-source tools in several classes throughout the undergraduate computer science curriculum at NC State University. Additionally, these tools support several logistical and educational needs in computer science classrooms, including assignment submission systems and automated grading. In this tools paper, we present our Canary Framework for supporting software engineering practices through the use of Eclipse for development; GitHub for submission and collaboration; and Jenkins for continuous integration and automated grading.
These tools are used in five of ten core courses by more than 3000 students over ten semesters. While the use of these tools in education is not unique, we want to share our model of using professional tools in a classroom setting and our experiences on how this framework can support multiple courses throughout the curriculum and at scale.}, journal={SIGCSE'18: PROCEEDINGS OF THE 49TH ACM TECHNICAL SYMPOSIUM ON COMPUTER SCIENCE EDUCATION}, publisher={ACM}, author={Heckman, Sarah and King, Jason}, year={2018}, pages={794–799} } @inproceedings{sheshadri_gitinabard_lynch_barnes_heckman_2018, title={Predicting student performance based on online study habits: a study of blended courses}, booktitle={Proceedings of the 11th International Conference on Educational Data Mining (EDM 2018)}, author={Sheshadri, Adithya and Gitinabard, Niki and Lynch, Collin F and Barnes, Tiffany and Heckman, Sarah}, year={2018}, pages={87–96} } @article{heckman_zhang_peérez-quiñones_hawthorne_2018, title={SIGCSE 2019 paper length change}, volume={50}, number={2}, journal={ACM SIGCSE Bulletin}, publisher={ACM}, author={Heckman, Sarah and Zhang, Jian and Pérez-Quiñones, Manuel A and Hawthorne, Elizabeth K}, year={2018}, pages={4–4} } @article{heckman_zhang_pérez-quiñones_hawthorne_2018, title={What is a SIGCSE symposium paper?}, volume={50}, number={3}, journal={ACM SIGCSE Bulletin}, publisher={ACM}, author={Heckman, Sarah and Zhang, Jian and Pérez-Quiñones, Manuel A and Hawthorne, Elizabeth K}, year={2018}, pages={3–3} } @article{gitinabard_xue_lynch_heckman_barnes_2017, title={A Social Network Analysis on Blended Courses}, journal={arXiv preprint arXiv:1709.10215}, author={Gitinabard, Niki and Xue, Linting and Lynch, Collin F and Heckman, Sarah and Barnes, Tiffany}, year={2017} } @article{bahler_battestilli_demaria_healey_heckman_heil_lester_mott_mealin_novitsky_etal_2017, title={Conversations (oral history interviews) with members of North Carolina State University Computer Science Department by Carol Lee and Carolyn Miller}, publisher={Department of Computer Science, North Carolina State University}, author={Bahler, Dennis and Battestilli, Lina and DeMaria, Mark and Healey, Christopher and Heckman, Sarah and Heil, Margaret and Lester, James and Mott, Bradford and Mealin, Sean and Novitsky, Melissa and et al.}, year={2017} } @inproceedings{vellukunnel_buffum_boyer_forbes_heckman_mayer-patel_2017, title={Deconstructing the Discussion Forum: Student Questions and Computer Science Learning}, booktitle={Proceedings of the 2017 ACM SIGCSE Technical Symposium on Computer Science Education}, author={Vellukunnel, Mickey and Buffum, Philip and Boyer, Kristy Elizabeth and Forbes, Jeffrey and Heckman, Sarah and Mayer-Patel, Ketan}, year={2017}, pages={603–608} } @inproceedings{heckman_carver_sherriff_2017, title={Designing Empirical Education Research Studies (DEERS): Creating an Answerable Research Question}, booktitle={Proceedings of the 2017 ACM SIGCSE Technical Symposium on Computer Science Education}, author={Heckman, Sarah and Carver, Jeffrey C and Sherriff, Mark}, year={2017}, pages={737–737} } @article{gitinabard_lynch_heckman_barnes_2017, title={Identifying Student Communities in Blended Courses}, journal={arXiv preprint arXiv:1710.04129},
author={Gitinabard, Niki and Lynch, Collin F and Heckman, Sarah and Barnes, Tiffany}, year={2017} } @inproceedings{smith_boyer_forbes_heckman_mayer-patel_2017, title={My Digital Hand: A Tool for Scaling Up One-to-One Peer Teaching in Support of Computer Science Learning}, booktitle={Proceedings of the 2017 ACM SIGCSE Technical Symposium on Computer Science Education}, author={Smith, Aaron J and Boyer, Kristy Elizabeth and Forbes, Jeffrey and Heckman, Sarah and Mayer-Patel, Ketan}, year={2017}, pages={549–554} } @inproceedings{al-zubidy_carver_heckman_sherriff_2016, title={A (Updated) Review of Empiricism at the SIGCSE Technical Symposium}, booktitle={Proceedings of the 47th ACM Technical Symposium on Computing Science Education}, author={Al-Zubidy, Ahmed and Carver, Jeffrey C and Heckman, Sarah and Sherriff, Mark}, year={2016}, pages={120–125} } @inproceedings{johnson_pandita_smith_ford_elder_murphy-hill_heckman_sadowski_2016, title={A cross-tool communication study on program analysis tool notifications}, booktitle={Proceedings of the 2016 24th ACM SIGSOFT International Symposium on Foundations of Software Engineering}, author={Johnson, Brittany and Pandita, Rahul and Smith, Justin and Ford, Denae and Elder, Sarah and Murphy-Hill, Emerson and Heckman, Sarah and Sadowski, Caitlin}, year={2016}, pages={73–84} } @inproceedings{heckman_king_2016, title={Teaching Software Engineering Skills in CS1.5: Incorporating Real-world Practices and Tools}, booktitle={Proceedings of the 47th ACM Technical Symposium on Computing Science Education}, author={Heckman, Sarah and King, Jason}, year={2016}, pages={696–697} } @inproceedings{heckman_2015, title={An Empirical Study of In-Class Laboratories on Student Learning of Linear Data Structures}, booktitle={Proceedings of the eleventh annual International Conference on International Computing Education Research}, author={Heckman, Sarah S}, year={2015}, pages={217–225} } @inproceedings{heckman_king_winters_2015, title={Automating Software Engineering Best Practices Using an Open Source Continuous Integration Framework}, booktitle={Proceedings of the 46th ACM Technical Symposium on Computer Science Education}, author={Heckman, Sarah and King, Jason and Winters, Michael}, year={2015}, pages={677–677} } @article{johnson_pandita_murphy-hill_heckman_2015, title={Bespoke Tools: Adapted to the Concepts Developers Know}, DOI={10.1145/2786805.2803197}, abstractNote={Even though different developers have varying levels of expertise, the tools in one developer's integrated development environment (IDE) behave the same as the tools in every other developers' IDE. In this paper, we propose the idea of automatically customizing development tools by modeling what a developer knows about software concepts. We then sketch three such ``bespoke'' tools and describe how development data can be used to infer what a developer knows about relevant concepts.
Finally, we describe our ongoing efforts to make bespoke program analysis tools that customize their notifications to the developer using them.}, journal={2015 10TH JOINT MEETING OF THE EUROPEAN SOFTWARE ENGINEERING CONFERENCE AND THE ACM SIGSOFT SYMPOSIUM ON THE FOUNDATIONS OF SOFTWARE ENGINEERING (ESEC/FSE 2015) PROCEEDINGS}, author={Johnson, Brittany and Pandita, Rahul and Murphy-Hill, Emerson and Heckman, Sarah}, year={2015}, pages={878–881} } @article{anderson_heckman_vouk_wright_carter_burge_gannod_2015, title={CS/SE Instructors Can Improve Student Writing without Reducing Class Time Devoted to Technical Content: Experimental Results}, DOI={10.1109/icse.2015.178}, abstractNote={The Computer Science and Software Engineering (CS/SE) profession reports that new college graduates lack the communication skills needed for personal and organizational success. Many CS/SE faculty may omit communication instruction from their courses because they do not want to reduce technical content. We experimented in a software-engineering-intensive second-semester programming course with strategies for improving students' writing of black box test plans that included no instruction on writing the plans beyond the standard lecture on testing. The treatment version of the course used 1) a modified assignment that focused on the plan's readers, 2) a model plan students could consult online, and 3) a modified grading rubric that identified the readers' needs. Three external raters found that students in the treatment sections outperformed students in the control sections on writing for five of nine criteria on rubrics for evaluating the plans and on the raters' holistic impression of the students' technical and communication abilities from the perspectives of a manager and a tester.}, journal={2015 IEEE/ACM 37TH IEEE INTERNATIONAL CONFERENCE ON SOFTWARE ENGINEERING, VOL 2}, author={Anderson, Paul V. and Heckman, Sarah and Vouk, Mladen and Wright, David and Carter, Michael and Burge, Janet E.
and Gannod, Gerald C.}, year={2015}, pages={455–464} } @inproceedings{sherriff_heckman_2015, title={Empirical Research in CS Education}, booktitle={Proceedings of the 46th ACM Technical Symposium on Computer Science Education}, author={Sherriff, Mark and Heckman, Sarah}, year={2015}, pages={701–701} } @inproceedings{heckman_williams_2013, title={A comparative evaluation of static analysis actionable alert identification techniques}, booktitle={Proceedings of the 9th International Conference on Predictive Models in Software Engineering}, author={Heckman, Sarah and Williams, Laurie}, year={2013}, pages={1–10} } @book{carter_fornaro_heckman_heil_2012, title={Developing a learning progression that integrates communication in an undergraduate CS/SE curriculum}, institution={North Carolina State University. Dept. of Computer Science}, author={Carter, Michael and Fornaro, Robert and Heckman, Sarah Smith and Heil, Margaret}, year={2012} } @article{heckman_williams_2011, title={A systematic literature review of actionable alert identification techniques for automated static code analysis}, volume={53}, number={4}, journal={Information and Software Technology}, publisher={Elsevier}, author={Heckman, Sarah and Williams, Laurie}, year={2011}, pages={363–387} } @inproceedings{heckman_horton_sherriff_2011, title={Teaching second-level Java and software engineering with Android}, DOI={10.1109/cseet.2011.5876144}, abstractNote={Over the past two years, second-year Java and software engineering courses have been taught at the University of Virginia and North Carolina State University utilizing the Android OS platform. Instructors taught a variety of traditional second-year topics, including abstraction, design, requirements, and testing, utilizing a variety of Android-based mobile devices. Anecdotal responses from student surveys and evaluations from five course sessions indicate that teaching lower-level courses with more advanced and current technology, even with a steeper learning curve, is beneficial. In this tutorial proposal, we outline our plan for presenting a session that would help educators incorporate the Android OS into their curriculum and how to use the system even if mobile devices are not available.}, booktitle={2011 24th IEEE-CS Conference on Software Engineering Education and Training (CSEET)}, author={Heckman, Sarah and Horton, T. B.
and Sherriff, M.}, year={2011}, pages={540–542} } @article{heckman_2010, title={Software testing (CS1 & CS2)}, volume={6}, journal={NC State University, August}, author={Heckman, Sarah}, year={2010} } @inproceedings{heckman_williams_2009, title={A model building process for identifying actionable static analysis alerts}, booktitle={2009 International Conference on Software Testing, Verification and Validation}, author={Heckman, Sarah and Williams, Laurie}, year={2009}, pages={161–170} } @phdthesis{heckman_2009, title={A systematic model building process for predicting actionable static analysis alerts}, school={North Carolina State University}, author={Heckman, Sarah Smith}, year={2009} } @book{heckman_williams_2008, title={A measurement framework of alert characteristics for false positive mitigation models}, institution={North Carolina State University. Dept. of Computer Science}, author={Heckman, Sarah Smith and Williams, Laurie Ann}, year={2008} } @inproceedings{heckman_williams_2008, title={On establishing a benchmark for evaluating static analysis alert prioritization and classification techniques}, booktitle={Proceedings of the Second ACM-IEEE international symposium on Empirical software engineering and measurement}, author={Heckman, Sarah and Williams, Laurie}, year={2008}, pages={41–50} } @inproceedings{heckman_2007, title={Adaptive probabilistic model for ranking code-based static analysis alerts}, booktitle={29th International Conference on Software Engineering (ICSE'07 Companion)}, author={Heckman, Sarah Smith}, year={2007}, pages={89–90} } @article{heckman_2007, title={Adaptively ranking alerts generated from automated static analysis}, volume={14}, number={1}, journal={XRDS: Crossroads, The ACM Magazine for Students}, publisher={ACM New York, NY, USA}, author={Heckman, Sarah Smith}, year={2007}, pages={1–11} } @inproceedings{sherriff_heckman_lake_williams_2007, title={Identifying fault-prone files using static analysis alerts through singular value decomposition}, booktitle={Proceedings of the 2007 conference of the center for advanced studies on Collaborative research}, author={Sherriff, Mark and Heckman, Sarah Smith and Lake, Mike and Williams, Laurie}, year={2007}, pages={276–279} } @inproceedings{sherriff_heckman_lake_williams_2007, title={Using groupings of static analysis alerts to identify files likely to contain field failures}, booktitle={The 6th Joint Meeting on European software engineering conference and the ACM SIGSOFT symposium on the foundations of software engineering: companion papers}, author={Sherriff, Mark S and Heckman, Sarah Smith and Lake, J Michael and Williams, Laurie A}, year={2007}, pages={565–568} } @inproceedings{heckman_williams_2006, title={Automated adaptive ranking and filtering of static analysis alerts}, booktitle={Fast Abstracts of the International Symposium on Software Reliability Engineering (ISSRE)}, author={Heckman, Sarah and Williams, Laurie}, year={2006} } @article{heckman_gehringer, title={Google Forms as an Enhanced Classroom Response System}, author={Heckman, Sarah and Gehringer, Edward F} }