Skip duplicated grade_ids during autograding
tuncbkose authored and Gehock committed Oct 4, 2024
1 parent 5cb3e59 commit bda844c
Showing 7 changed files with 64 additions and 4 deletions.
4 changes: 4 additions & 0 deletions nbgrader/converters/autograde.py
@@ -10,6 +10,7 @@
from ..preprocessors import (
    AssignLatePenalties, ClearOutput, DeduplicateIds, OverwriteCells, SaveAutoGrades,
    Execute, LimitOutput, OverwriteKernelspec, CheckCellMetadata, IgnorePattern)
from ..postprocessors import CheckDuplicateFlag
from ..api import Gradebook, MissingEntry
from .. import utils

@@ -196,3 +197,6 @@ def convert_single_notebook(self, notebook_filename: str) -> None:
            super(Autograde, self).convert_single_notebook(notebook_filename)
        finally:
            self._sanitizing = True

        self.log.info(f"Post-processing {notebook_filename}")
        CheckDuplicateFlag(notebook_filename)
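
For illustration only (not part of this commit's diff), a minimal sketch of what the new post-processing call does from a caller's point of view; the notebook path is hypothetical:

from nbgrader.postprocessors import CheckDuplicateFlag, DuplicateIdError

notebook = "autograded/alice/ps1/problem1.ipynb"  # hypothetical path

try:
    # Re-reads the already-autograded notebook from disk and raises if any cell
    # still carries the "duplicate" flag set earlier by DeduplicateIds.
    CheckDuplicateFlag(notebook)
except DuplicateIdError:
    # base.py (below) catches this, logs an error, and records the submission
    # as errored; the flagged cells themselves were skipped during grading.
    pass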
8 changes: 8 additions & 0 deletions nbgrader/converters/base.py
@@ -17,6 +17,7 @@
from ..coursedir import CourseDirectory
from ..utils import find_all_files, rmtree, remove
from ..preprocessors.execute import UnresponsiveKernelError
from ..postprocessors import DuplicateIdError
from ..nbgraderformat import SchemaTooOldError, SchemaTooNewError
import typing
from nbconvert.exporters.exporter import ResourcesDict
@@ -405,6 +406,13 @@ def _handle_failure(gd: typing.Dict[str, str]) -> None:
                errors.append((gd['assignment_id'], gd['student_id']))
                _handle_failure(gd)

            except DuplicateIdError:
                self.log.error(
                    f"Encountered a cell with a duplicate id while processing {notebook_filename}. "
                    "Autograding will skip cells marked as duplicate."
                )
                errors.append((gd['assignment_id'], gd['student_id']))

            # Raise unhandled exceptions for the outer try/except
            except Exception as e:
                raise e
6 changes: 6 additions & 0 deletions nbgrader/nbgraderformat/__init__.py
@@ -4,3 +4,9 @@
from .v4 import reads_v4 as reads, writes_v4 as writes

SCHEMA_VERSION = MetadataValidator.schema_version

# Metadata required by latest schema, along with default values
SCHEMA_REQUIRED = {"schema_version": 4,
                   "grade": False,
                   "locked": False,
                   "solution": False}
6 changes: 6 additions & 0 deletions nbgrader/postprocessors/__init__.py
@@ -0,0 +1,6 @@
from .checkduplicateflag import CheckDuplicateFlag, DuplicateIdError

__all__ = [
"CheckDuplicateFlag",
"DuplicateIdError"
]
27 changes: 27 additions & 0 deletions nbgrader/postprocessors/checkduplicateflag.py
@@ -0,0 +1,27 @@
import nbformat
from nbformat.notebooknode import NotebookNode


class DuplicateIdError(Exception):

    def __init__(self, message):
        super(DuplicateIdError, self).__init__(message)


class CheckDuplicateFlag:

    def __init__(self, notebook_filename):
        with open(notebook_filename, encoding="utf-8") as f:
            nb = nbformat.read(f, as_version=nbformat.NO_CONVERT)
        self.postprocess(nb)

    def postprocess(self, nb: NotebookNode):
        for cell in nb.cells:
            self.postprocess_cell(cell)

    @staticmethod
    def postprocess_cell(cell: NotebookNode):
        if "nbgrader" in cell.metadata and "duplicate" in cell.metadata.nbgrader:
            del cell.metadata.nbgrader["duplicate"]
            msg = "Detected cells with the same id"
            raise DuplicateIdError(msg)
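
A self-contained usage sketch for the new post-processor (illustration only; the temporary file and cell contents are made up):

import tempfile

import nbformat
from nbformat.v4 import new_notebook, new_code_cell

from nbgrader.postprocessors import CheckDuplicateFlag, DuplicateIdError

# Build a notebook whose only cell still carries the duplicate flag.
cell = new_code_cell("print('hi')")
cell.metadata["nbgrader"] = {"schema_version": 4, "grade": False, "solution": False,
                             "locked": False, "duplicate": True}
nb = new_notebook(cells=[cell])

with tempfile.NamedTemporaryFile(mode="w", suffix=".ipynb", delete=False) as f:
    nbformat.write(nb, f)

try:
    CheckDuplicateFlag(f.name)
except DuplicateIdError as e:
    print("flag detected:", e)  # "Detected cells with the same id"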
6 changes: 5 additions & 1 deletion nbgrader/preprocessors/deduplicateids.py
@@ -1,5 +1,6 @@
from .. import utils
from . import NbGraderPreprocessor
from ..nbgraderformat import SCHEMA_REQUIRED
from nbconvert.exporters.exporter import ResourcesDict
from nbformat.notebooknode import NotebookNode
from typing import Tuple
@@ -27,7 +28,10 @@ def preprocess_cell(self,
            grade_id = cell.metadata.nbgrader['grade_id']
            if grade_id in self.grade_ids:
                self.log.warning("Cell with id '%s' exists multiple times!", grade_id)
                cell.metadata.nbgrader = {}
                # Replace the problematic metadata and leave a message in the cell source
                cell.source = "# THIS CELL CONTAINED A DUPLICATE ID DURING AUTOGRADING\n" + cell.source
                # Copy the schema defaults (dict union `|` needs Python 3.9+) so SCHEMA_REQUIRED is not mutated
                cell.metadata.nbgrader = {**SCHEMA_REQUIRED, "duplicate": True}
            else:
                self.grade_ids.add(grade_id)

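A minimal sketch of the preprocessor's new behavior on a notebook where two grade cells share a grade_id (illustration only; the cell metadata is written out by hand rather than created through nbgrader's own tooling):

from nbformat.v4 import new_notebook, new_code_cell

from nbgrader.preprocessors import DeduplicateIds

def graded_cell(source, grade_id):
    cell = new_code_cell(source)
    cell.metadata["nbgrader"] = {"schema_version": 4, "grade": True, "solution": False,
                                 "locked": False, "grade_id": grade_id, "points": 1}
    return cell

nb = new_notebook(cells=[graded_cell("assert foo() == 1", "test_foo"),
                         graded_cell("assert foo() == 2", "test_foo")])  # same id twice

nb, resources = DeduplicateIds().preprocess(nb, {})

# The first cell keeps its metadata; the second is flagged and its source is prefixed.
print(nb.cells[1].source.splitlines()[0])             # "# THIS CELL CONTAINED A DUPLICATE ID DURING AUTOGRADING"
print(nb.cells[1].metadata["nbgrader"]["duplicate"])  # True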
11 changes: 8 additions & 3 deletions nbgrader/tests/preprocessors/test_deduplicateids.py
@@ -3,6 +3,7 @@
from nbformat.v4 import new_notebook

from ...preprocessors import DeduplicateIds
from ...nbgraderformat import SCHEMA_REQUIRED
from .base import BaseTestPreprocessor
from .. import (
    create_grade_cell, create_solution_cell, create_locked_cell)
@@ -14,6 +15,10 @@ def preprocessor():
    return pp


# Schema defaults plus the duplicate flag, copied so SCHEMA_REQUIRED itself is not mutated (`|` needs Python 3.9+)
EXPECTED_DUPLICATE_METADATA = {**SCHEMA_REQUIRED, "duplicate": True}


class TestDeduplicateIds(BaseTestPreprocessor):

    def test_duplicate_grade_cell(self, preprocessor):
@@ -26,7 +31,7 @@ def test_duplicate_grade_cell(self, preprocessor):
        nb, resources = preprocessor.preprocess(nb, {})

        assert nb.cells[0].metadata.nbgrader != {}
        assert nb.cells[1].metadata.nbgrader == {}
        assert nb.cells[1].metadata.nbgrader == EXPECTED_DUPLICATE_METADATA

    def test_duplicate_solution_cell(self, preprocessor):
        cell1 = create_solution_cell("hello", "code", "foo")
@@ -38,7 +43,7 @@ def test_duplicate_solution_cell(self, preprocessor):
        nb, resources = preprocessor.preprocess(nb, {})

        assert nb.cells[0].metadata.nbgrader != {}
        assert nb.cells[1].metadata.nbgrader == {}
        assert nb.cells[1].metadata.nbgrader == EXPECTED_DUPLICATE_METADATA

    def test_duplicate_locked_cell(self, preprocessor):
        cell1 = create_locked_cell("hello", "code", "foo")
@@ -50,4 +55,4 @@ def test_duplicate_locked_cell(self, preprocessor):
        nb, resources = preprocessor.preprocess(nb, {})

        assert nb.cells[0].metadata.nbgrader != {}
        assert nb.cells[1].metadata.nbgrader == {}
        assert nb.cells[1].metadata.nbgrader == EXPECTED_DUPLICATE_METADATA
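
Assuming a development install of nbgrader, the updated tests can be run in isolation with pytest nbgrader/tests/preprocessors/test_deduplicateids.py.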
