@inproceedings{bai_sthapit_heckman_price_stolee_2023,
  title = {An Experience Report on Introducing Explicit Strategies into Testing Checklists for Advanced Beginners},
  url = {https://doi.org/10.1145/3587102.3588781},
  DOI = {10.1145/3587102.3588781},
  abstractNote = {Software testing is a critical skill for computing students, but learning and practicing testing can be challenging, particularly for beginners. A recent study suggests that a lightweight testing checklist that contains testing strategies and tutorial information could assist students in writing quality tests. However, students expressed a desire for more support in knowing how to test the code/scenario. Moreover, the potential costs and benefits of the testing checklist are not yet examined in a classroom setting. To that end, we improved the checklist by integrating explicit testing strategies into it (ETS Checklist), which provide step-by-step guidance on how to transfer semantic information from instructions to the possible testing scenarios. In this paper, we report our experiences in designing explicit strategies in unit testing, as well as adapting the ETS Checklist as optional tool support in a CS1.5 course. With the quantitative and qualitative analysis of the survey responses and lab assignment submissions generated by students, we discuss students' engagement with the ETS Checklists. Our results suggest that students who used the checklist intervention had significantly higher quality in their student-authored test code, in terms of code coverage, compared to those who did not, especially for assignments earlier in the course. We also observed students' unawareness of their need for help in writing high-quality tests.},
  booktitle = {Proceedings of the 2023 Conference on Innovation and Technology in Computer Science Education, Vol. 1 (ITiCSE 2023)},
  author = {Bai, Gina R. and Sthapit, Sandeep and Heckman, Sarah and Price, Thomas W. and Stolee, Kathryn T.},
  year = {2023},
  pages = {194--200}
}

@inproceedings{wang_bacher_isvik_limke_sthapit_shi_tabarsi_tran_catete_barnes_et_al_2023,
  title = {Investigating the Impact of On-Demand Code Examples on Novices' Open-Ended Programming Projects},
  url = {https://doi.org/10.1145/3568813.3600141},
  DOI = {10.1145/3568813.3600141},
  abstractNote = {Background and Context: Open-ended programming projects encourage novice students to choose and pursue projects based on their own ideas and interests, and are widely used in many introductory programming courses. However, novice programmers encounter challenges exploring and discovering new ideas, implementing their ideas, and applying unfamiliar programming concepts and APIs. Code examples are one of the primary resources students use to apply code usage patterns and learn API knowledge, but little work has investigated the effect of having access to examples on students' open-ended programming experience. Objectives: In this work, we evaluate the impact of code examples on open-ended programming, through a study with 46 local high school students in a full-day coding workshop. Method: We conducted a controlled study, where half of the students had full access to 37 code examples using an example browser system called Example Helper and the other half had 5 standard, tutorial examples. Findings: We found that students who had access to all 37 code examples used a significantly larger variety of code APIs, perceived the programming as relatively more creative, but also experienced a higher task load. We also found suggestive evidence of better post-assignment performance from the example group, showing that some students were able to learn and apply the knowledge they gained from examples to a new programming task.},
  booktitle = {Proceedings of the 2023 ACM Conference on International Computing Education Research, Vol. 1 (ICER 2023)},
  author = {Wang, Wengran and Bacher, John and Isvik, Amy and Limke, Ally and Sthapit, Sandeep and Shi, Yang and Tabarsi, Benyamin T. and Tran, Keith and Catete, Veronica and Barnes, Tiffany and others},
  year = {2023},
  pages = {464--475}
}