From 242df8e5af7438268ca9437390e99bb7dac01d51 Mon Sep 17 00:00:00 2001 From: Antonio Gonzalez Date: Thu, 31 Aug 2017 13:12:08 -0600 Subject: [PATCH 1/4] rm wrapper.py --- .travis.yml | 14 +-- qiita_ware/wrapper.py | 203 ------------------------------------------ 2 files changed, 4 insertions(+), 213 deletions(-) delete mode 100644 qiita_ware/wrapper.py diff --git a/.travis.yml b/.travis.yml index 1318803fe..310cf7d89 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,8 +7,7 @@ env: matrix: - TEST_ADD_STUDIES=False COVER_PACKAGE=qiita_db - TEST_ADD_STUDIES=False COVER_PACKAGE=qiita_pet - - TEST_ADD_STUDIES=False COVER_PACKAGE="qiita_core qiita_ware" - - TEST_ADD_STUDIES=True + - TEST_ADD_STUDIES=True COVER_PACKAGE="qiita_core qiita_ware" before_install: - redis-server --version - wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh @@ -51,19 +50,14 @@ before_script: # export MOI_CONFIG_FP=`pwd`/qiita_core/support_files/config_test_travis.cfg; # fi - qiita-env make --no-load-ontologies - - | - if [ ${TEST_ADD_STUDIES} == "False" ]; then - qiita pet webserver --no-build-docs start & - fi + - qiita pet webserver --no-build-docs start & script: - sleep 5 - - if [ ${TEST_ADD_STUDIES} == "True" ]; then test_data_studies/commands.sh ; fi - - if [ ${TEST_ADD_STUDIES} == "True" ]; then qiita-cron-job ; fi - if [ ${TEST_ADD_STUDIES} == "False" ]; then qiita-test-install ; fi - if [ ${TEST_ADD_STUDIES} == "False" ]; then nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=$COVER_PACKAGE; fi + - if [ ${TEST_ADD_STUDIES} == "True" ]; then test_data_studies/commands.sh ; fi + - if [ ${TEST_ADD_STUDIES} == "True" ]; then qiita-cron-job ; fi - flake8 qiita_* setup.py scripts/* - - ls -R /home/travis/miniconda3/envs/qiita/lib/python2.7/site-packages/qiita_pet/support_files/doc/ - - qiita pet webserver addons: postgresql: "9.3" services: diff --git a/qiita_ware/wrapper.py b/qiita_ware/wrapper.py deleted file mode 100644 index 35fa9e62c..000000000 --- a/qiita_ware/wrapper.py +++ /dev/null @@ -1,203 +0,0 @@ -# ----------------------------------------------------------------------------- -# Copyright (c) 2014--, The Qiita Development Team. -# -# Distributed under the terms of the BSD 3-clause License. -# -# The full license is in the file LICENSE, distributed with this software. 
-# ----------------------------------------------------------------------------- - -from __future__ import division - -from shutil import rmtree -from os import remove -from sys import stderr - -import networkx as nx -from moi.job import submit, ctxs, ctx_default - - -class ParallelWrapper(object): - """Base class for any parallel code""" - def __init__(self, retain_temp_files=False, block=True, - moi_update_status=None, moi_context=None, moi_parent_id=None): - self._retain_temp_files = retain_temp_files - self._block = block - self._job_graph = nx.DiGraph() - self._logger = None - self._filepaths_to_remove = [] - self._dirpaths_to_remove = [] - self._update_status = moi_update_status - self._context = ctxs.get(moi_context, ctxs[ctx_default]) - self._group = moi_parent_id - - def _construct_job_graph(self, *args, **kwargs): - """Constructs the workflow graph with the jobs to execute - - Raises - ------ - NotImplementedError - If not overwritten in a subclass - """ - raise NotImplementedError("This method should be overwritten by the " - "subclass") - - def _failure_callback(self, msg=None): - """Callback to execute in case that any of the job nodes failed - - Parameters - ---------- - msg : str - Any message generated from the failure - """ - pass - - def _validate_execution_order(self, results): - """Makes sure that the execution order represented in _job_graph has - been respected - - Parameters - ---------- - results : dict of {Node: AsyncResult} - The AsyncResult objects of the executed jobs - """ - # Adapted from - # http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html - self._logger.write("Validating execution order... ") - for node in self._job_graph: - started = results[node].metadata.started - if started is None: - self._logger.write("Job %s: starting time not available" - % node) - continue - - for parent in self._job_graph.predecessors(node): - finished = results[parent].metadata.completed - if finished is None: - self._logger.write("Job %s: finish time not available" - % parent) - continue - - if started < finished: - self._logger.write( - "Job order not respected: %s should have happened " - "after %s\n" % (node, parent)) - - self._logger.write("Done\n") - - def _validate_job_status(self, results): - """Validates that all jobs executed finished correctly - - Parameters - ---------- - results : dict of {Node: AsyncResult} - The AsyncResult objects of the executed jobs - """ - self._logger.write("\nValidating job status:\n") - errored = False - callback_msg = [] - for node, ar in results.items(): - msg = ["\nJob %s: " % node] - if ar.successful(): - msg.append("Success\n") - else: - errored = True - msg.append("Error\n") - try: - job_result = ar.get() - except Exception, e: - job_result = e - msg.append("\tJob results: %s\n" - "\tPython output: %s\n" - "\tStandard output: %s\n" - "\tStandard error: %s\n" - % (job_result, ar.pyout, ar.stdout, ar.stderr)) - callback_msg.append(''.join(msg)) - self._logger.write(''.join(msg)) - - if errored: - self._failure_callback(msg='\n'.join(callback_msg)) - - def _clean_up_paths(self): - """Removes the temporary paths""" - if not self._retain_temp_files: - self._logger.write("\nCleaning up temporary files") - for fp in self._filepaths_to_remove: - remove(fp) - for dp in self._dirpaths_to_remove: - rmtree(dp) - - def _job_blocker(self, results): - # Block until all jobs are done - self._logger.write("\nWaiting for all jobs to finish... 
") - self._context.bv.wait(results.values()) - self._logger.write("Done\n") - self._validate_job_status(results) - self._validate_execution_order(results) - self._clean_up_paths() - if self._logger != stderr: - self._logger.close() - - def _submit_with_deps(self, deps, name, func, *args, **kwargs): - """Submit with dependencies - - Parameters - ---------- - deps : list of AsyncResult - AsyncResults that this new job depend on - name : str - A job name - func : function - The function to submit - - Returns - ------- - AsyncResult - The result returned by IPython's apply_async. - """ - parent_id = self._group - url = None - - with self._context.bv.temp_flags(after=deps, block=False): - _, _, ar = submit(self._context, parent_id, name, url, func, - *args, **kwargs) - return ar - - def __call__(self, *args, **kwargs): - self._construct_job_graph(*args, **kwargs) - - if self._logger is None: - self._logger = stderr - - # Adapted from - # http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html - async_results = {} - for node_name in nx.topological_sort(self._job_graph): - node = self._job_graph.node[node_name] - requires_deps = node.get('requies_deps', False) - - func = node['func'] - args = node['args'] - job_name = node['job_name'] - kwargs = {} - - deps = [] - dep_results = {} - kwargs['dep_results'] = dep_results - for predecessor_name in self._job_graph.predecessors(node_name): - predecessor_result = async_results[predecessor_name] - deps.append(predecessor_result) - - if requires_deps: - dep_results[predecessor_name] = predecessor_result.get() - - self._logger.write("Submitting %s: %s %s...\n " % (node_name, - func.__name__, - args)) - - async_results[node_name] = \ - self._submit_with_deps(deps, job_name, func, *args, **kwargs) - - self._logger.write("Done\n") - - if self._block: - self._job_blocker(async_results) From fbf5402efe3c1d631dcb9715057b0f55476c273f Mon Sep 17 00:00:00 2001 From: Antonio Gonzalez Date: Thu, 31 Aug 2017 13:31:28 -0600 Subject: [PATCH 2/4] moving qiita pet in .travis.yml --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 310cf7d89..1a1dbca4f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -50,13 +50,13 @@ before_script: # export MOI_CONFIG_FP=`pwd`/qiita_core/support_files/config_test_travis.cfg; # fi - qiita-env make --no-load-ontologies - - qiita pet webserver --no-build-docs start & script: - sleep 5 - - if [ ${TEST_ADD_STUDIES} == "False" ]; then qiita-test-install ; fi - - if [ ${TEST_ADD_STUDIES} == "False" ]; then nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=$COVER_PACKAGE; fi - if [ ${TEST_ADD_STUDIES} == "True" ]; then test_data_studies/commands.sh ; fi - if [ ${TEST_ADD_STUDIES} == "True" ]; then qiita-cron-job ; fi + - qiita pet webserver --no-build-docs start & + - if [ ${TEST_ADD_STUDIES} == "False" ]; then qiita-test-install ; fi + - if [ ${TEST_ADD_STUDIES} == "False" ]; then nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=$COVER_PACKAGE; fi - flake8 qiita_* setup.py scripts/* addons: postgresql: "9.3" From 8051cbb6859be9052733ad70206e65efb0faa8ef Mon Sep 17 00:00:00 2001 From: Antonio Gonzalez Date: Thu, 31 Aug 2017 13:54:12 -0600 Subject: [PATCH 3/4] some other .travis.yml fixes --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1a1dbca4f..aa2925e07 100644 --- a/.travis.yml +++ b/.travis.yml @@ -50,13 +50,13 @@ 
before_script: # export MOI_CONFIG_FP=`pwd`/qiita_core/support_files/config_test_travis.cfg; # fi - qiita-env make --no-load-ontologies + - qiita-test-install script: - sleep 5 - if [ ${TEST_ADD_STUDIES} == "True" ]; then test_data_studies/commands.sh ; fi - if [ ${TEST_ADD_STUDIES} == "True" ]; then qiita-cron-job ; fi - qiita pet webserver --no-build-docs start & - - if [ ${TEST_ADD_STUDIES} == "False" ]; then qiita-test-install ; fi - - if [ ${TEST_ADD_STUDIES} == "False" ]; then nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=$COVER_PACKAGE; fi + - nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=$COVER_PACKAGE - flake8 qiita_* setup.py scripts/* addons: postgresql: "9.3" From df5f5c8df66a812caf04c31d45c65cd7edb6f8f1 Mon Sep 17 00:00:00 2001 From: Antonio Gonzalez Date: Thu, 31 Aug 2017 14:19:17 -0600 Subject: [PATCH 4/4] addressing @josenavas comment --- .travis.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index aa2925e07..445b48ba8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -53,11 +53,14 @@ before_script: - qiita-test-install script: - sleep 5 - - if [ ${TEST_ADD_STUDIES} == "True" ]; then test_data_studies/commands.sh ; fi - - if [ ${TEST_ADD_STUDIES} == "True" ]; then qiita-cron-job ; fi - qiita pet webserver --no-build-docs start & + - QIITA_PID=$! - nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=$COVER_PACKAGE + - kill $QIITA_PID + - if [ ${TEST_ADD_STUDIES} == "True" ]; then test_data_studies/commands.sh ; fi + - if [ ${TEST_ADD_STUDIES} == "True" ]; then qiita-cron-job ; fi - flake8 qiita_* setup.py scripts/* + - qiita pet webserver addons: postgresql: "9.3" services:
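
Net effect of the series on .travis.yml (a reconstruction from the hunks above, not an authoritative listing: it assumes the four patches apply cleanly, that nothing else touches the file in between, and that indentation follows the file's existing two-space style): `qiita-test-install` moves into `before_script`, the webserver is started in the background, its PID is captured with `$!`, and it is killed as soon as the nosetests run finishes before the remaining checks. The `QIITA_PID=$!` / `kill $QIITA_PID` pair only works if Travis executes consecutive `script` list entries in the same shell session, which is how the resulting configuration reads:

    before_script:
      # ... existing setup steps, reconstructed from the patch context; exact
      # surrounding lines may differ ...
      - qiita-env make --no-load-ontologies
      - qiita-test-install
    script:
      - sleep 5
      # start the webserver in the background and remember its PID
      - qiita pet webserver --no-build-docs start &
      - QIITA_PID=$!
      # run the test suite against the live server
      - nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=$COVER_PACKAGE
      # stop the background webserver before the remaining checks
      - kill $QIITA_PID
      - if [ ${TEST_ADD_STUDIES} == "True" ]; then test_data_studies/commands.sh ; fi
      - if [ ${TEST_ADD_STUDIES} == "True" ]; then qiita-cron-job ; fi
      - flake8 qiita_* setup.py scripts/*
      - qiita pet webserver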