@inproceedings{1352cfecef7e4b4bbd65ec1d3c46a93d,
  title     = {Executable Exams: Taxonomy, Implementation and Prospects},
  abstract  = {Traditionally exams in introductory programming courses have tended to be multiple choice, or ``paper-based'' coding exams in which students hand write code. This does not reflect how students typically write and are assessed on programming assignments in which they write code on a computer and are able to validate and assess their code using an auto-grading system. Executable exams are exams in which students are given programming problems, write code using a computer within a development environment and submissions are digitally validated or executed. This format is far more consistent with how students engage in programming assignments. This paper explores the executable exam format and attempts to gauge the state-of-the-practice and how prevalent it is. First, we formulate a taxonomy of characteristics of executable exams, identifying common aspects and various levels of flexibility, then give two case studies: one in which executable exams have been utilized for nearly 10 years and another in which they've been recently adopted. Finally, we provide results from faculty surveys providing evidence that, though not standard practice, the use of executable exams is not uncommon and appears to be on the rise.},
  keywords  = {assessment, autograder, executable exams, introductory computer science, paper-based exams, taxonomy},
  author    = {Chris Bourke and Yael Erez and Orit Hazzan},
  note      = {Publisher Copyright: {\textcopyright} 2023 ACM.; 54th ACM Technical Symposium on Computer Science Education, SIGCSE 2023 ; Conference date: 15-03-2023 Through 18-03-2023},
  year      = {2023},
  month     = mar,
  day       = {2},
  doi       = {10.1145/3545945.3569724},
  language  = {English},
  publisher = {Association for Computing Machinery},
  series    = {SIGCSE 2023 - Proceedings of the 54th {ACM} Technical Symposium on Computer Science Education},
  pages     = {381--387},
  booktitle = {SIGCSE 2023 - Proceedings of the 54th {ACM} Technical Symposium on Computer Science Education},
}