Skip to content

Commit

Permalink
Support Python 3.12 (#4919)
Browse files Browse the repository at this point in the history
* Add Python 3.12 to CI

* Update sphinx-autoapi and astroid to deal with crash

pylint-dev/pylint#8782

* Remove dead comment

* Add rules to 3.11 build

* update htcondor

* Update use of HTCondor in appliance build

* Ensure tests are instanced and don't jumble relative paths + debug logging

* oops, update utilsTest too

* is this a pytest issue?

* Add some more log messages

* Fix time.sleep

* Remove the debug statement in docker

* remove logger print statements in utilsTest.py and pin pytest

* Up the timeout on some tests (possibly a timing issue)

* Up the timeout on more tests

* Up the pytest version again
  • Loading branch information
stxue1 authored May 9, 2024
1 parent 56b0dc3 commit 115be67
Show file tree
Hide file tree
Showing 5 changed files with 58 additions and 25 deletions.
37 changes: 33 additions & 4 deletions .gitlab-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ variables:
PYTHONIOENCODING: "utf-8"
DEBIAN_FRONTEND: "noninteractive"
TOIL_OWNER_TAG: "shared"
MAIN_PYTHON_PKG: "python3.11"
MAIN_PYTHON_PKG: "python3.12"
# Used to tell pytest which tests to be run by specifying markers,
# Allows partitioning of tests to prevent duplicate running of tests in different jobs.
# Currently specifies special tests that are not run by quick_test_offline.
Expand Down Expand Up @@ -67,8 +67,8 @@ lint:
script:
- pwd
- ${MAIN_PYTHON_PKG} -m virtualenv venv && . venv/bin/activate && make prepare && make develop extras=[all]
- python3.11 -m pip freeze
- python3.11 --version
- ${MAIN_PYTHON_PKG} -m pip freeze
- ${MAIN_PYTHON_PKG} --version
- make mypy
- make docs
# - make diff_pydocstyle_report
Expand Down Expand Up @@ -191,6 +191,12 @@ py310_main:
- TOIL_SKIP_DOCKER=true make test threads="${TEST_THREADS}" tests=src/toil/test/lib

py311_appliance_build:
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
- if: $CI_COMMIT_BRANCH =~ /.*3\.11.*/
stage: basic_tests
script:
- pwd
Expand All @@ -201,14 +207,37 @@ py311_appliance_build:

py311_main:
rules:
- if: $CI_PIPELINE_SOURCE != "schedule"
- if: $CI_PIPELINE_SOURCE == "schedule"
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH =~ /.*-fix-ci/
- if: $CI_COMMIT_BRANCH =~ /.*3\.11.*/
stage: basic_tests
script:
- pwd
- python3.11 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.11 && pip install -U pip wheel && make prepare && make develop extras=[all] packages='htcondor==10.2.3'
- make test threads="${TEST_THREADS}" tests="src/toil/test/src src/toil/test/utils"
- TOIL_SKIP_DOCKER=true make test threads="${TEST_THREADS}" tests=src/toil/test/lib

py312_appliance_build:
stage: basic_tests
script:
- pwd
- python3.12 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12 && pip install -U pip wheel && pip install -U build && make prepare && pip install pycparser && make develop extras=[all] packages='htcondor==23.6.1'
# This reads GITLAB_SECRET_FILE_QUAY_CREDENTIALS
- python setup_gitlab_docker.py
- make push_docker

py312_main:
rules:
- if: $CI_PIPELINE_SOURCE != "schedule"
stage: basic_tests
script:
- pwd
- python3.12 -m virtualenv venv && . venv/bin/activate && curl -sS https://bootstrap.pypa.io/get-pip.py | python3.12 && pip install -U pip wheel && make prepare && make develop extras=[all] packages='htcondor==23.6.1'
- make test threads="${TEST_THREADS}" tests="src/toil/test/src src/toil/test/utils"
- TOIL_SKIP_DOCKER=true make test threads="${TEST_THREADS}" tests=src/toil/test/lib

slurm_test:
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
Expand Down
8 changes: 4 additions & 4 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -135,11 +135,11 @@ clean_sdist:
# Setting SET_OWNER_TAG will tag cloud resources so that UCSC's cloud murder bot won't kill them.
test: check_venv check_build_reqs
TOIL_OWNER_TAG="shared" \
python -m pytest --durations=0 --strict-markers --log-level DEBUG --log-cli-level INFO -r s $(cov) -n $(threads) --dist loadscope $(tests) -m "$(marker)" --color=yes
python -m pytest --log-format="%(asctime)s %(levelname)s %(message)s" --durations=0 --strict-markers --log-level DEBUG --log-cli-level INFO -r s $(cov) -n $(threads) --dist loadscope $(tests) -m "$(marker)" --color=yes

test_debug: check_venv check_build_reqs
TOIL_OWNER_TAG="$(whoami)" \
python -m pytest --durations=0 --strict-markers --log-level DEBUG -s -o log_cli=true --log-cli-level DEBUG -r s $(tests) -m "$(marker)" --tb=native --maxfail=1
python -m pytest --log-format="%(asctime)s %(levelname)s %(message)s" --durations=0 --strict-markers --log-level DEBUG -s -o log_cli=true --log-cli-level DEBUG -r s $(tests) -m "$(marker)" --tb=native --maxfail=1 --color=yes


# This target will skip building docker and all docker based tests
Expand All @@ -148,12 +148,12 @@ test_offline: check_venv check_build_reqs
@printf "$(cyan)All docker related tests will be skipped.$(normal)\n"
TOIL_SKIP_DOCKER=True \
TOIL_SKIP_ONLINE=True \
python -m pytest -vv --timeout=600 --strict-markers --log-level DEBUG --log-cli-level INFO $(cov) -n $(threads) --dist loadscope $(tests) -m "$(marker)"
python -m pytest --log-format="%(asctime)s %(levelname)s %(message)s" -vv --timeout=600 --strict-markers --log-level DEBUG --log-cli-level INFO $(cov) -n $(threads) --dist loadscope $(tests) -m "$(marker)" --color=yes

# This target will run about 1 minute of tests, and stop at the first failure
test_1min: check_venv check_build_reqs
TOIL_SKIP_DOCKER=True \
python -m pytest -vv --timeout=10 --strict-markers --log-level DEBUG --log-cli-level INFO --maxfail=1 src/toil/test/batchSystems/batchSystemTest.py::SingleMachineBatchSystemTest::test_run_jobs src/toil/test/batchSystems/batchSystemTest.py::KubernetesBatchSystemBenchTest src/toil/test/server/serverTest.py::ToilWESServerBenchTest::test_get_service_info src/toil/test/cwl/cwlTest.py::CWLWorkflowTest::test_run_colon_output src/toil/test/jobStores/jobStoreTest.py::FileJobStoreTest::testUpdateBehavior -m "$(marker)"
python -m pytest --log-format="%(asctime)s %(levelname)s %(message)s" -vv --timeout=10 --strict-markers --log-level DEBUG --log-cli-level INFO --maxfail=1 src/toil/test/batchSystems/batchSystemTest.py::SingleMachineBatchSystemTest::test_run_jobs src/toil/test/batchSystems/batchSystemTest.py::KubernetesBatchSystemBenchTest src/toil/test/server/serverTest.py::ToilWESServerBenchTest::test_get_service_info src/toil/test/cwl/cwlTest.py::CWLWorkflowTest::test_run_colon_output src/toil/test/jobStores/jobStoreTest.py::FileJobStoreTest::testUpdateBehavior -m "$(marker)" --color=yes

ifdef TOIL_DOCKER_REGISTRY

Expand Down
5 changes: 2 additions & 3 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,8 @@ pytest-timeout>=1.4.2,<3
stubserver>=1.1,<2
setuptools>=65.5.1,<70
sphinx>=7,<8
sphinx-autoapi>=2.1.1,<3
# astroid 3 won't work until some sphinx-autoapi release after 2.1.1
astroid>=2.15,<3
sphinx-autoapi>=3,<4
astroid>=3,<4
sphinx-autodoc-typehints>=1.24.0,<3
sphinxcontrib-autoprogram==0.1.9
cwltest>=2.2.20211116163652
Expand Down
3 changes: 3 additions & 0 deletions src/toil/test/docs/scriptsTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import subprocess
import sys
import unittest
import pytest

from typing import List

Expand Down Expand Up @@ -65,6 +66,7 @@ def testStats(self):
# This script asks for 4 cores but we might need to run the tests in only 3.
self.checkExitCode("tutorial_stats.py", ["--scale=0.5"])

@pytest.mark.timeout(1200)
def testDynamic(self):
self.checkExitCode("tutorial_dynamic.py")

Expand Down Expand Up @@ -110,6 +112,7 @@ def testMultiplejobs3(self):
"second or third.*Hello world, I have a message: second or third.*Hello world,"
" I have a message: last")

@pytest.mark.timeout(1200)
def testPromises2(self):
self.checkExpectedOut("tutorial_promises2.py",
"['00000', '00001', '00010', '00011', '00100', '00101', '00110', '00111',"
Expand Down
30 changes: 16 additions & 14 deletions src/toil/test/utils/utilsTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ def setUp(self):
super().setUp()
self.tempDir = self._createTempDir()
self.tempFile = get_temp_file(rootDir=self.tempDir)
self.outputFile = get_temp_file(rootDir=self.tempDir)
self.outputFile = 'someSortedStuff.txt'
self.toilDir = os.path.join(self.tempDir, "jobstore")
self.assertFalse(os.path.exists(self.toilDir))
Expand All @@ -73,9 +74,9 @@ def setUp(self):
'-m',
'toil.test.sort.sort',
f'file:{self.toilDir}',
f'--fileToSort={self.tempFile}',
f'--outputFile={self.outputFile}',
'--clean=never',
'--numLines=1',
'--lineLength=1'
]

self.restart_sort_workflow_cmd = [
Expand All @@ -91,7 +92,7 @@ def tearDown(self):
if os.path.exists(self.toilDir):
shutil.rmtree(self.toilDir)

for f in ['fileToSort.txt', 'sortedFile.txt', 'output.txt']:
for f in [self.tempFile, self.outputFile, os.path.join(self.tempDir, "output.txt")]:
if os.path.exists(f):
os.remove(f)

Expand Down Expand Up @@ -314,14 +315,14 @@ def check_status(self, status, status_fn, seconds=20):
def testGetPIDStatus(self):
"""Test that ToilStatus.getPIDStatus() behaves as expected."""
wf = subprocess.Popen(self.sort_workflow_cmd)
self.check_status('RUNNING', status_fn=ToilStatus.getPIDStatus, seconds=20)
self.check_status('RUNNING', status_fn=ToilStatus.getPIDStatus, seconds=60)
wf.wait()
self.check_status('COMPLETED', status_fn=ToilStatus.getPIDStatus)
self.check_status('COMPLETED', status_fn=ToilStatus.getPIDStatus, seconds=60)

# TODO: we need to reach into the FileJobStore's files and delete this
# shared file. We assume we know its internal layout.
os.remove(os.path.join(self.toilDir, 'files/shared/pid.log'))
self.check_status('QUEUED', status_fn=ToilStatus.getPIDStatus)
self.check_status('QUEUED', status_fn=ToilStatus.getPIDStatus, seconds=60)

def testGetStatusFailedToilWF(self):
"""
Expand All @@ -331,32 +332,32 @@ def testGetStatusFailedToilWF(self):
"""
# --badWorker is set to force failure.
wf = subprocess.Popen(self.sort_workflow_cmd + ['--badWorker=1'])
self.check_status('RUNNING', status_fn=ToilStatus.getStatus)
self.check_status('RUNNING', status_fn=ToilStatus.getStatus, seconds=60)
wf.wait()
self.check_status('ERROR', status_fn=ToilStatus.getStatus)
self.check_status('ERROR', status_fn=ToilStatus.getStatus, seconds=60)

@needs_cwl
@needs_docker
def testGetStatusFailedCWLWF(self):
"""Test that ToilStatus.getStatus() behaves as expected with a failing CWL workflow."""
# --badWorker is set to force failure.
cmd = ['toil-cwl-runner', '--jobStore', self.toilDir, '--clean=never', '--badWorker=1',
'src/toil/test/cwl/sorttool.cwl', '--reverse', '--input', 'src/toil/test/cwl/whale.txt']
'src/toil/test/cwl/sorttool.cwl', '--reverse', '--input', 'src/toil/test/cwl/whale.txt', f'--outdir={self.tempDir}']
wf = subprocess.Popen(cmd)
self.check_status('RUNNING', status_fn=ToilStatus.getStatus)
self.check_status('RUNNING', status_fn=ToilStatus.getStatus, seconds=60)
wf.wait()
self.check_status('ERROR', status_fn=ToilStatus.getStatus)
self.check_status('ERROR', status_fn=ToilStatus.getStatus, seconds=60)

@needs_cwl
@needs_docker
def testGetStatusSuccessfulCWLWF(self):
"""Test that ToilStatus.getStatus() behaves as expected with a successful CWL workflow."""
cmd = ['toil-cwl-runner', '--jobStore', self.toilDir, '--clean=never',
'src/toil/test/cwl/sorttool.cwl', '--reverse', '--input', 'src/toil/test/cwl/whale.txt']
'src/toil/test/cwl/sorttool.cwl', '--reverse', '--input', 'src/toil/test/cwl/whale.txt', f'--outdir={self.tempDir}']
wf = subprocess.Popen(cmd)
self.check_status('RUNNING', status_fn=ToilStatus.getStatus, seconds=20)
self.check_status('RUNNING', status_fn=ToilStatus.getStatus, seconds=60)
wf.wait()
self.check_status('COMPLETED', status_fn=ToilStatus.getStatus)
self.check_status('COMPLETED', status_fn=ToilStatus.getStatus, seconds=60)

@needs_cwl
@patch('builtins.print')
Expand All @@ -375,6 +376,7 @@ def testPrintJobLog(self, mock_print):
args, kwargs = mock_print.call_args
self.assertIn('invalidcommand', args[0])

@pytest.mark.timeout(1200)
def testRestartAttribute(self):
"""
Test that the job store is only destroyed when we observe a successful workflow run.
Expand Down

0 comments on commit 115be67

Please sign in to comment.