diff --git a/.github/workflows/pythonapp.yml b/.github/workflows/pythonapp.yml
index 178e728f..ebf154f4 100644
--- a/.github/workflows/pythonapp.yml
+++ b/.github/workflows/pythonapp.yml
@@ -15,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.6, 3.7, 3.8]
+        python-version: [3.6, 3.7, 3.8, 3.9]
     steps:
     - uses: actions/checkout@v2
     - name: Set up Python ${{ matrix.python-version }}
diff --git a/Dockerfile b/Dockerfile
index 3884323a..58cb4e62 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.6
+FROM python:3.8
 
 WORKDIR /root/challengeutils
 COPY ./ ./
diff --git a/challengeutils/__version__.py b/challengeutils/__version__.py
index 11731085..ce1305bf 100644
--- a/challengeutils/__version__.py
+++ b/challengeutils/__version__.py
@@ -1 +1 @@
-__version__ = "3.2.0"
+__version__ = "4.0.0"
diff --git a/challengeutils/helpers.py b/challengeutils/helpers.py
deleted file mode 100644
index 11db9757..00000000
--- a/challengeutils/helpers.py
+++ /dev/null
@@ -1,162 +0,0 @@
-'''
-Challenge helper functions
-'''
-import os
-import sys
-import time
-
-import synapseclient
-import synapseutils
-
-from . import challenge, utils
-
-WORKFLOW_LAST_UPDATED_KEY = "orgSagebionetworksSynapseWorkflowOrchestratorWorkflowLastUpdated"
-WORKFLOW_START_KEY = "orgSagebionetworksSynapseWorkflowOrchestratorExecutionStarted"
-TIME_REMAINING_KEY = "orgSagebionetworksSynapseWorkflowOrchestratorTimeRemaining"
-
-
-def rename_submission_files(syn, evaluationid, download_location="./",
-                            status="SCORED"):
-    '''
-    This function renames the submission files of an evaluation queue.
-    For many challenges we require participants to submit files that are
-    named one thing such as prediction.csv. This function renames them to
-
-    submitter_date_filename
-
-    Args:
-        syn: synapse object
-        evaluationid: Id of Evaluation queue
-        download_location: location to download files to (Default is ./)
-        status: The submissions to download (Default is SCORED)
-    '''
-    submission_bundle = syn.getSubmissionBundles(evaluationid, status=status)
-    for sub, status in submission_bundle:
-        if sub.get("teamId") is not None:
-            submitter = syn.getTeam(sub.get("teamId"))['name']
-        else:
-            submitter = syn.getUserProfile(sub.userId)['userName']
-        date = sub.createdOn
-        submission_ent = \
-            syn.getSubmission(sub.id, downloadLocation=download_location)
-        filename = os.path.basename(submission_ent.filePath)
-        newname = submitter+"___"+date+"___"+filename
-        newname = newname.replace(' ', '_')
-        os.rename(filename, newname)
-        print(newname)
-
-
-def create_team_wikis(syn, synid, templateid, tracker_table_synid):
-    """
-    Function that creates wiki pages from a template by looking at teams that
-    are registered for a challenge. The teams that have a wiki made for them
-    Are stored into a trackerTable that has columns wikiSynId, and teamId
-
-    Args:
-        synId: Synapse id of challenge project
-        templateId: Synapse id of the template
-        trackerTableSynId: Synapse id of Table that tracks if wiki pages
-                           have been made per team
-    """
-    challenge_ent = syn.get(synid)
-
-    challenge_obj = challenge.get_challenge(syn, challenge_ent)
-    registered_teams = syn._GET_paginated(
-        "/challenge/{}/challengeTeam".format(challenge_obj.id))
-    for i in registered_teams:
-        submitted_teams = syn.tableQuery(
-            "SELECT * FROM {} where teamId = '{}'".format(
-                tracker_table_synid, i['teamId']))
-        if len(submitted_teams.asDataFrame()) == 0:
-            team = syn.getTeam(i['teamId'])
-            # The project name is the challenge project name and team name
-            project = syn.store(synapseclient.Project("{} {}".format(
-                challenge_ent.name, team.name)))
-            # Give admin access to the team
-            syn.setPermissions(
-                project, i['teamId'],
-                accessType=['DELETE', 'CHANGE_SETTINGS', 'MODERATE',
-                            'CREATE', 'READ', 'DOWNLOAD', 'UPDATE',
-                            'CHANGE_PERMISSIONS'])
-            wiki_copy = synapseutils.copy(syn, templateid, project.id)
-            # syn.sendMessage(i[])
-            # Store copied synId to tracking table
-            tracking_table = synapseclient.Table(
-                tracker_table_synid, [[wiki_copy[templateid], i['teamId']]])
-            syn.store(tracking_table)
-
-
-def kill_docker_submission_over_quota(syn, evaluation_id, quota=sys.maxsize):
-    '''
-    Kills any docker container that exceeds the run time quota
-    Rerunning submissions will require setting TimeRemaining annotation
-    to a positive integer
-
-    Args:
-        syn (obj): Synapse object
-        evaluation_id (int): Synapse evaluation queue id
-        quota (int): Quota in milliseconds. Default is sys.maxsize.
-                     One hour is 3600000.
-    '''
-    if not isinstance(quota, int):
-        raise ValueError("quota must be an integer")
-    if quota <= 0:
-        raise ValueError("quota must be larger than 0")
-
-    evaluation_query = (f"select * from evaluation_{evaluation_id} where "
-                        "status == 'EVALUATION_IN_PROGRESS'")
-    query_results = utils.evaluation_queue_query(syn, evaluation_query)
-
-    for result in query_results:
-        # If last updated and start doesn't exist, set to 0
-        last_updated = int(result.get(WORKFLOW_LAST_UPDATED_KEY, 0))
-        start = int(result.get(WORKFLOW_START_KEY, 0))
-        model_run_time = last_updated - start
-        if model_run_time > quota:
-            status = syn.getSubmissionStatus(result['objectId'])
-            add_annotations = {TIME_REMAINING_KEY: 0}
-            status = utils.update_single_submission_status(status,
-                                                           add_annotations)
-            syn.store(status)
-
-    # Rerunning submissions will require setting this
-    # annotation to a positive integer
-
-
-def archive_writeup(syn, evaluation, stat="VALIDATED", reArchive=False):
-    """
-    Archive the submissions for the given evaluation queue and
-    store them in the destination synapse folder.
-
-    :param evaluation: a synapse evaluation queue or its ID
-    :param query: a query that will return the desired submissions.
-                  At least the ID must be returned. Defaults to:
-                  'select * from evaluation_[EVAL_ID] where status=="SCORED"'
-    """
-    if type(evaluation) != synapseclient.Evaluation:
-        evaluation = syn.getEvaluation(evaluation)
-
-    print("\n\nArchiving", evaluation.id, evaluation.name)
-    print("-" * 60)
-
-    for sub, status in syn.getSubmissionBundles(evaluation, status=stat):
-        # retrieve file into cache and copy it to destination
-        checkIfArchived = filter(
-            lambda x: x.get("key") == "archived",
-            status.annotations['stringAnnos'])
-        if len(list(checkIfArchived)) == 0 or reArchive:
-            projectEntity = synapseclient.Project(
-                'Archived {} {} {} {}'.format(
-                    sub.name.replace("&", "+").replace("'", ""),
-                    int(round(time.time() * 1000)),
-                    sub.id,
-                    sub.entityId))
-            entity = syn.store(projectEntity)
-            adminPriv = [
-                'DELETE', 'DOWNLOAD', 'CREATE', 'READ', 'CHANGE_PERMISSIONS',
-                'UPDATE', 'MODERATE', 'CHANGE_SETTINGS']
-            syn.setPermissions(entity, "3324230", adminPriv)
-            synapseutils.copy(syn, sub.entityId, entity.id)
-            archived = {"archived": entity.id}
-            status = utils.update_single_submission_status(status, archived)
-            syn.store(status)
diff --git a/challengeutils/submission.py b/challengeutils/submission.py
index 9a4d6a83..a481ca4e 100644
--- a/challengeutils/submission.py
+++ b/challengeutils/submission.py
@@ -1,17 +1,26 @@
 """Functions that interact with submissions"""
 import os
 import re
+import sys
 import time
+from typing import Union
 
 import pandas as pd
 import synapseutils
-from synapseclient import AUTHENTICATED_USERS, entity, Project
+from synapseclient import (AUTHENTICATED_USERS, entity, Project, Synapse,
+                           SubmissionViewSchema)
 from synapseclient.annotations import to_submission_status_annotations
 from synapseclient.core.exceptions import SynapseHTTPError
+from synapseclient.core.utils import id_of
 
 from . import dockertools
 from . import permissions
 from . import utils
+from . import annotations
+
+WORKFLOW_LAST_UPDATED_KEY = "orgSagebionetworksSynapseWorkflowOrchestratorWorkflowLastUpdated"
+WORKFLOW_START_KEY = "orgSagebionetworksSynapseWorkflowOrchestratorExecutionStarted"
+TIME_REMAINING_KEY = "orgSagebionetworksSynapseWorkflowOrchestratorTimeRemaining"
 
 
 def append_writeup_to_main_submission(row, syn):
@@ -145,7 +154,7 @@ def _validate_public_permissions(syn, proj):
         syn_users_perms = syn.getPermissions(
             proj.entityId, AUTHENTICATED_USERS)
         public_perms = syn.getPermissions(proj.entityId)
-        if set(syn_users_perms) == {"READ", "DOWNLOAD"} and \
+        if ("READ" in syn_users_perms and "DOWNLOAD" in syn_users_perms) and \
                 "READ" in public_perms:
             error = ""
 
@@ -166,7 +175,7 @@ def _validate_admin_permissions(syn, proj, admin):
     try:
         # Remove error message if admin has read and download permissions.
         admin_perms = syn.getPermissions(proj.entityId, admin)
-        if set(admin_perms) == {"READ", "DOWNLOAD"}:
+        if "READ" in admin_perms and "DOWNLOAD" in admin_perms:
             error = ""
 
     except SynapseHTTPError as e:
@@ -310,3 +319,48 @@ def download_current_lead_sub(syn, submissionid, status, cutoff_annotation,
                                               verbose)
         return path
     return None
+
+
+def stop_submission_over_quota(
+    syn: Synapse,
+    submission_view: Union[str, SubmissionViewSchema],
+    quota: int = sys.maxsize
+):
+    """Stops any submission that has exceeded the run time quota by using
+    submission views. A submission view must first exist.
+    Rerunning submissions will require setting TimeRemaining annotation
+    to a positive integer.
+
+    Args:
+        syn: Synapse connection
+        submission_view: Submission View or its Synapse Id.
+        quota: Quota in milliseconds. Default is sys.maxsize.
+            One hour is 3600000.
+
+    """
+    if not isinstance(quota, int):
+        raise ValueError("quota must be an integer")
+    if quota <= 0:
+        raise ValueError("quota must be larger than 0")
+
+    try:
+        view_query = syn.tableQuery(
+            f"select {WORKFLOW_LAST_UPDATED_KEY}, {WORKFLOW_START_KEY}, id, "
+            f"status from {id_of(submission_view)} where "
+            "status = 'EVALUATION_IN_PROGRESS'"
+        )
+    except SynapseHTTPError as http_error:
+        raise ValueError(
+            "Submission view must have columns: "
+            f"{WORKFLOW_LAST_UPDATED_KEY}, {WORKFLOW_START_KEY}, id"
+        ) from http_error
+
+    view_querydf = view_query.asDataFrame()
+    runtime = (view_querydf[WORKFLOW_LAST_UPDATED_KEY] -
+               view_querydf[WORKFLOW_START_KEY])
+    submissions_over_quota_idx = runtime > quota
+    over_quotadf = view_querydf[submissions_over_quota_idx]
+    for index, row in over_quotadf.iterrows():
+        add_annotations = {TIME_REMAINING_KEY: 0}
+        annotations.annotate_submission(syn, row['id'], add_annotations,
+                                        is_private=False, force=True)
diff --git a/docs/about/changelog.rst b/docs/about/changelog.rst
index cc40606d..d9037ae6 100644
--- a/docs/about/changelog.rst
+++ b/docs/about/changelog.rst
@@ -8,6 +8,25 @@ For older releases, visit the `GitHub releases`_.
 .. _Github releases: https://github.com/Sage-Bionetworks/challengeutils/releases
 
 
+v4.0.0
+------
+.. Important::
+   **Support for synapseclient<2.3.0 is no longer available**; upgrade to the
+   latest version with:
+
+   .. code:: console
+
+      $ pip install synapseclient --upgrade
+
+.. Important::
+   **Support for Python 3.6 will be dropped in the latter
+   half of this year.**
+
+- Support `Python` 3.9
+- Deprecate `helpers.py` and create `stop_submission_over_quota` function
+- Fix conditionals when validating permissions for project submissions
+
+
 v3.2.0
 ------
 
diff --git a/docs/conf.py b/docs/conf.py
index e9f21d4e..16b7f0f9 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -25,7 +25,7 @@ author = 'Thomas Yu'
 
 
 # The short X.Y version
-version = '3.2'
+version = '4.0'
 # The full version, including alpha/beta/rc tags
 release = version
 
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 3f2b92c8..cc5f9f8e 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,4 +1,4 @@
-pandas==1.0.1
-synapseclient==2.1.0
+pandas==1.1.5
+synapseclient==2.3.0
 challengeutils
 sphinx_rtd_theme
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 4227e279..b481e540 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,2 @@
-pandas==1.0.1
-synapseclient==2.2.2
\ No newline at end of file
+pandas
+synapseclient==2.3.0
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 1f852752..6497caa7 100644
--- a/setup.py
+++ b/setup.py
@@ -11,19 +11,45 @@ with open("README.md", "r") as fh:
     long_description = fh.read()
 
 
-setup(name='challengeutils',
-      version=about["__version__"],
-      description='Challenge utility functions',
-      url='https://github.com/Sage-Bionetworks/challengeutils',
-      author='Thomas Yu',
-      author_email='thomasyu888@gmail.com',
-      long_description=long_description,
-      long_description_content_type="text/markdown",
-      license='Apache',
-      packages=find_packages(),
-      zip_safe=False,
-      python_requires='>=3.6, <3.9',
-      scripts=['bin/runqueue.py'],
-      entry_points={'console_scripts': ['challengeutils = challengeutils.__main__:main']},
-      install_requires=['pandas>=1.0.0',
-                        'synapseclient>=2.2.0'])
+setup(
+    name='challengeutils',
+    version=about["__version__"],
+    description='Challenge utility functions',
+    url='https://github.com/Sage-Bionetworks/challengeutils',
+    author='Thomas Yu',
+    author_email='thomasyu888@gmail.com',
+    long_description=long_description,
+    long_description_content_type="text/markdown",
+    license='Apache',
+    packages=find_packages(),
+    zip_safe=False,
+    python_requires='>=3.6, <3.10',
+    scripts=['bin/runqueue.py'],
+    entry_points={
+        'console_scripts': ['challengeutils = challengeutils.__main__:main']
+    },
+    install_requires=['pandas>=1.1.5',
+                      'synapseclient>=2.3.0'],
+    project_urls={
+        "Documentation": "https://sage-bionetworks.github.io/challengeutils/",
+        "Source Code": "https://github.com/Sage-Bionetworks/challengeutils",
+        "Bug Tracker": "https://github.com/Sage-Bionetworks/challengeutils/issues",
+    },
+    classifiers=[
+        'Development Status :: 5 - Production/Stable',
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 3.6',
+        'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
+        'Operating System :: MacOS',
+        'Operating System :: Microsoft :: Windows',
+        'Operating System :: Unix',
+        'Operating System :: POSIX :: Linux',
+        'Intended Audience :: Developers',
+        'Intended Audience :: Science/Research',
+        'License :: OSI Approved :: Apache Software License',
+        'Topic :: Software Development :: Libraries',
+        'Topic :: Scientific/Engineering',
+        'Topic :: Scientific/Engineering :: Bio-Informatics'],
+)
diff --git a/tests/test_helpers.py b/tests/test_helpers.py
deleted file mode 100644
index bd8de558..00000000
--- a/tests/test_helpers.py
+++ /dev/null
@@ -1,135 +0,0 @@
-'''
-Test challengeutils helper functions
-'''
-from unittest import mock
-from unittest.mock import patch
-
-import pytest
-import synapseclient
-
-from challengeutils import helpers, utils
-
-SYN = mock.create_autospec(synapseclient.Synapse)
-
-LAST_UPDATED_TIME = 1000000
-START_TIME = 10000
-DOCKER_SUB_ANNOTATION = {helpers.WORKFLOW_LAST_UPDATED_KEY: LAST_UPDATED_TIME,
-                         helpers.WORKFLOW_START_KEY: START_TIME,
-                         'objectId':"12345"}
-EVALUATION_ID = 111
-
-
-def test_noneintquota_kill_docker_submission_over_quota():
-    '''
-    ValueError is raised when none integer quota is passed in
-    '''
-    with pytest.raises(ValueError, match=r'quota must be an integer'):
-        helpers.kill_docker_submission_over_quota(SYN, EVALUATION_ID,
-                                                  quota="foo")
-
-
-def test_greaterthan0quota_kill_docker_submission_over_quota():
-    '''
-    ValueError is raised when quota of 0 or less is passed
-    '''
-    with pytest.raises(ValueError, match=r'quota must be larger than 0'):
-        helpers.kill_docker_submission_over_quota(SYN, EVALUATION_ID,
-                                                  quota=0)
-    with pytest.raises(ValueError, match=r'quota must be larger than 0'):
-        helpers.kill_docker_submission_over_quota(SYN, EVALUATION_ID,
-                                                  quota=-1)
-
-
-def test_noquota_kill_docker_submission_over_quota():
-    '''
-    Time remaining annotation should not be added
-    if no quota is set, the default is sys.maxsize.
-    '''
-    with patch.object(utils, "evaluation_queue_query",
-                      return_value=[DOCKER_SUB_ANNOTATION]) as patch_query,\
-            patch.object(SYN,
-                         "getSubmissionStatus") as patch_getstatus,\
-            patch.object(utils,
-                         "update_single_submission_status") as patch_update, \
-            patch.object(SYN, "store") as patch_synstore:
-        helpers.kill_docker_submission_over_quota(SYN, EVALUATION_ID)
-        query = ("select * from evaluation_{} where "
-                 "status == 'EVALUATION_IN_PROGRESS'").format(EVALUATION_ID)
-        patch_query.assert_called_once_with(SYN, query)
-        patch_getstatus.assert_not_called()
-        patch_update.assert_not_called()
-        patch_synstore.assert_not_called()
-
-
-def test_notdocker_kill_docker_submission_over_quota():
-    '''
-    Time remaining annotation should not be added
-    if a submission is not validated/scored by the workflowhook
-    the submission will not have the right annotations,
-    '''
-    with patch.object(utils, "evaluation_queue_query",
-                      return_value=[{}]) as patch_query,\
-            patch.object(SYN,
-                         "getSubmissionStatus") as patch_getstatus,\
-            patch.object(utils,
-                         "update_single_submission_status") as patch_update, \
-            patch.object(SYN, "store") as patch_synstore:
-        helpers.kill_docker_submission_over_quota(SYN, EVALUATION_ID)
-        query = ("select * from evaluation_{} where "
-                 "status == 'EVALUATION_IN_PROGRESS'").format(EVALUATION_ID)
-        patch_query.assert_called_once_with(SYN, query)
-        patch_getstatus.assert_not_called()
-        patch_update.assert_not_called()
-        patch_synstore.assert_not_called()
-
-
-def test_underquota_kill_docker_submission_over_quota():
-    '''
-    Time remaining annotation should not be added
-    if the model is not over quota
-    '''
-    with patch.object(utils, "evaluation_queue_query",
-                      return_value=[DOCKER_SUB_ANNOTATION]) as patch_query,\
-            patch.object(SYN,
-                         "getSubmissionStatus") as patch_getstatus,\
-            patch.object(utils,
-                         "update_single_submission_status") as patch_update, \
-            patch.object(SYN, "store") as patch_synstore:
-        # Set quota thats greater than the runtime
-        quota = LAST_UPDATED_TIME - START_TIME + 9000
-        helpers.kill_docker_submission_over_quota(SYN, EVALUATION_ID,
-                                                  quota=quota)
-        query = ("select * from evaluation_{} where "
-                 "status == 'EVALUATION_IN_PROGRESS'").format(EVALUATION_ID)
-        patch_query.assert_called_once_with(SYN, query)
-        patch_getstatus.assert_not_called()
-        patch_update.assert_not_called()
-        patch_synstore.assert_not_called()
-
-
-def test_overquota_kill_docker_submission_over_quota():
-    '''
-    Time remaining annotation should not be added
-    if the model is over the quota
-    '''
-    sub_status = {"annotations": []}
-    quota_over_annotations = {helpers.TIME_REMAINING_KEY: 0}
-    with patch.object(utils, "evaluation_queue_query",
-                      return_value=[DOCKER_SUB_ANNOTATION]) as patch_query,\
-            patch.object(SYN, "getSubmissionStatus",
-                         return_value=sub_status) as patch_getstatus,\
-            patch.object(utils, "update_single_submission_status",
-                         return_value=sub_status) as patch_update, \
-            patch.object(SYN, "store") as patch_synstore:
-        # Set quota thats lower than the runtime
-        quota = LAST_UPDATED_TIME - START_TIME - 9000
-        helpers.kill_docker_submission_over_quota(SYN, EVALUATION_ID,
-                                                  quota=quota)
-        query = ("select * from evaluation_{} where "
-                 "status == 'EVALUATION_IN_PROGRESS'").format(EVALUATION_ID)
-        patch_query.assert_called_once_with(SYN, query)
-        objectid = DOCKER_SUB_ANNOTATION['objectId']
-        patch_getstatus.assert_called_once_with(objectid)
-        patch_update.assert_called_once_with(sub_status,
-                                             quota_over_annotations)
-        patch_synstore.assert_called_once_with(sub_status)
diff --git a/tests/test_submission.py b/tests/test_submission.py
index 5f79f463..26b519b3 100644
--- a/tests/test_submission.py
+++ b/tests/test_submission.py
@@ -5,11 +5,12 @@ from unittest.mock import Mock, patch
 import uuid
 
+import pandas as pd
 import pytest
 import synapseclient
 from synapseclient.core.exceptions import SynapseHTTPError
 
-from challengeutils import dockertools, submission, utils
+from challengeutils import annotations, dockertools, submission, utils
 
 SYN = mock.create_autospec(synapseclient.Synapse)
 SYN.configPath = None
@@ -256,3 +257,134 @@ def test_invalid_download_current_lead_sub():
             SYN, SUBMISSIONID, "INVALID", "key", verbose=False
         )
     assert dl_file is None
+
+
+class TestStopDockerSubmission():
+    def setup_method(self):
+        self.last_updated_time = 1000000
+        self.start_time = 10000
+        self.submission_viewdf = pd.DataFrame([{
+            submission.WORKFLOW_LAST_UPDATED_KEY: self.last_updated_time,
+            submission.WORKFLOW_START_KEY: self.start_time,
+            'id': "12345"
+        }])
+        self.mock_tablequery = Mock()
+        self.fileview_id = 111
+
+    def test_noneintquota(self):
+        '''
+        ValueError is raised when none integer quota is passed in
+        '''
+        with pytest.raises(ValueError, match=r'quota must be an integer'):
+            submission.stop_submission_over_quota(SYN, self.fileview_id,
+                                                  quota="foo")
+
+    @pytest.mark.parametrize("quota", [0, -1])
+    def test_greaterthan0quota(self, quota):
+        '''
+        ValueError is raised when quota of 0 or less is passed
+        '''
+        with pytest.raises(ValueError, match=r'quota must be larger than 0'):
+            submission.stop_submission_over_quota(SYN, self.fileview_id,
+                                                  quota=quota)
+
+    def test_queryfail(self):
+        '''
+        ValueError is raised when tableQuery fails
+        '''
+        with patch.object(SYN, "tableQuery",
+                          side_effect=SynapseHTTPError),\
+                pytest.raises(ValueError,
+                              match=r'Submission view must have columns:*'):
+            submission.stop_submission_over_quota(SYN, self.fileview_id)
+
+    def test_noquota(self):
+        '''
+        Time remaining annotation should not be added
+        if no quota is set, the default is sys.maxsize.
+        '''
+        with patch.object(SYN, "tableQuery",
+                          return_value=self.mock_tablequery) as patch_query,\
+                patch.object(self.mock_tablequery, "asDataFrame",
+                             return_value=self.submission_viewdf),\
+                patch.object(annotations, "annotate_submission") as patch_store:
+            submission.stop_submission_over_quota(SYN, self.fileview_id)
+            query = (
+                f"select {submission.WORKFLOW_LAST_UPDATED_KEY}, "
+                f"{submission.WORKFLOW_START_KEY}, id, "
+                f"status from {self.fileview_id} where "
+                "status = 'EVALUATION_IN_PROGRESS'"
+            )
+            patch_query.assert_called_once_with(query)
+            patch_store.assert_not_called()
+
+    def test_notstartedsubmission(self):
+        '''
+        Time remaining annotation should not be added
+        if a submission is not validated/scored by the workflowhook
+        the submission will not have the right annotations,
+        '''
+        self.submission_viewdf.loc[
+            0, submission.WORKFLOW_LAST_UPDATED_KEY
+        ] = float('nan')
+        with patch.object(SYN, "tableQuery",
+                          return_value=self.mock_tablequery) as patch_query,\
+                patch.object(self.mock_tablequery, "asDataFrame",
+                             return_value=self.submission_viewdf),\
+                patch.object(annotations, "annotate_submission") as patch_store:
+            submission.stop_submission_over_quota(SYN, self.fileview_id)
+            query = (
+                f"select {submission.WORKFLOW_LAST_UPDATED_KEY}, "
+                f"{submission.WORKFLOW_START_KEY}, id, "
+                f"status from {self.fileview_id} where "
+                "status = 'EVALUATION_IN_PROGRESS'"
+            )
+            patch_query.assert_called_once_with(query)
+            patch_store.assert_not_called()
+
+    def test_underquota(self):
+        '''
+        Time remaining annotation should not be added
+        if the model is not over quota
+        '''
+        with patch.object(SYN, "tableQuery",
+                          return_value=self.mock_tablequery) as patch_query,\
+                patch.object(self.mock_tablequery, "asDataFrame",
+                             return_value=self.submission_viewdf),\
+                patch.object(annotations, "annotate_submission") as patch_store:
+            quota = self.last_updated_time - self.start_time + 9000
+            submission.stop_submission_over_quota(SYN, self.fileview_id,
+                                                  quota=quota)
+            query = (
+                f"select {submission.WORKFLOW_LAST_UPDATED_KEY}, "
+                f"{submission.WORKFLOW_START_KEY}, id, "
+                f"status from {self.fileview_id} where "
+                "status = 'EVALUATION_IN_PROGRESS'"
+            )
+            patch_query.assert_called_once_with(query)
+            patch_store.assert_not_called()
+
+    def test_overquota(self):
+        '''
+        Time remaining annotation should be added
+        if the model is over the quota
+        '''
+        with patch.object(SYN, "tableQuery",
+                          return_value=self.mock_tablequery) as patch_query,\
+                patch.object(self.mock_tablequery, "asDataFrame",
+                             return_value=self.submission_viewdf),\
+                patch.object(annotations, "annotate_submission") as patch_store:
+            quota = self.last_updated_time - self.start_time - 9000
+            submission.stop_submission_over_quota(SYN, self.fileview_id,
+                                                  quota=quota)
+            query = (
+                f"select {submission.WORKFLOW_LAST_UPDATED_KEY}, "
+                f"{submission.WORKFLOW_START_KEY}, id, "
+                f"status from {self.fileview_id} where "
+                "status = 'EVALUATION_IN_PROGRESS'"
+            )
+            patch_query.assert_called_once_with(query)
+            patch_store.assert_called_once_with(
+                SYN, "12345", {submission.TIME_REMAINING_KEY: 0},
+                is_private=False, force=True
+            )