Commit 704a949 (1 parent: 46cb844), showing 5 changed files with 100 additions and 81 deletions.
.isort.cfg
@@ -1,2 +1,2 @@
 [settings]
-known_third_party = adaptive,cloudpickle,ipyparallel,ipywidgets,nbconvert,pandas,psutil,setuptools,structlog,tinydb,toolz,tqdm,zmq
+known_third_party = adaptive,cloudpickle,ipyparallel,ipywidgets,jinja2,nbconvert,pandas,psutil,setuptools,structlog,tinydb,toolz,tqdm,zmq
MANIFEST.in
@@ -1,4 +1,5 @@
 recursive-include adaptive_scheduler *.py
+include adaptive_scheduler/run_script.py.j2
 include LICENSE
 include README.rst
 include requirements.txt
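Because the new template is not a *.py file, the existing `recursive-include adaptive_scheduler *.py` rule would not package it; the explicit `include` line ensures the .j2 file ships with the source distribution. A minimal sketch of loading it from the installed package follows; the helper name is hypothetical and not part of `adaptive_scheduler`'s API, and the real loading strategy used by `_make_default_run_script()` may differ.

from pathlib import Path

import jinja2

import adaptive_scheduler


def load_run_script_template() -> jinja2.Template:
    """Hypothetical helper: load the packaged run-script template."""
    # The .j2 file sits next to the installed modules, which is why it must be
    # declared in MANIFEST.in instead of being picked up by the *.py rule.
    template_path = Path(adaptive_scheduler.__file__).parent / "run_script.py.j2"
    return jinja2.Template(template_path.read_text())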
adaptive_scheduler/run_script.py.j2 (new file)
@@ -0,0 +1,80 @@
#!/usr/bin/env python3
# {{ run_script_fname }}, automatically generated
# by `adaptive_scheduler.server_support._make_default_run_script()`.
{% if executor_type == "dask-mpi" %}
from dask_mpi import initialize
initialize()
{% endif %}
import argparse
from contextlib import suppress

import adaptive
import cloudpickle
from adaptive_scheduler import client_support
{%- if executor_type == "mpi4py" %}
from mpi4py.futures import MPIPoolExecutor
{% elif executor_type == "ipyparallel" %}
from adaptive_scheduler.utils import connect_to_ipyparallel
{% elif executor_type == "dask-mpi" %}
from distributed import Client
{% elif executor_type == "process-pool" %}
from loky import get_reusable_executor
{% endif %}
if __name__ == "__main__":  # ← use this, see warning @ https://bit.ly/2HAk0GG

    # parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("--profile", action="store", dest="profile", type=str)
    parser.add_argument("--n", action="store", dest="n", type=int)
    parser.add_argument("--log-fname", action="store", dest="log_fname", type=str)
    parser.add_argument("--job-id", action="store", dest="job_id", type=str)
    parser.add_argument("--name", action="store", dest="name", type=str)
    args = parser.parse_args()

    # the address of the "database manager"
    url = "{{ url }}"

    # ask the database for a learner that we can run; progress is logged to `args.log_fname`
    learner, fname = client_support.get_learner(
        url, args.log_fname, args.job_id, args.name
    )

    # load the data
    with suppress(Exception):
        learner.load(fname)

    # connect to the executor
{%- if executor_type == "mpi4py" %}
    executor = MPIPoolExecutor()
{% elif executor_type == "ipyparallel" %}
    executor = connect_to_ipyparallel(profile=args.profile, n=args.n)
{% elif executor_type == "dask-mpi" %}
    executor = Client()
{% elif executor_type == "process-pool" %}
    executor = get_reusable_executor(max_workers=args.n)
{% endif %}
    # `runner_kwargs` was serialized with `cloudpickle.dumps` on the server side
    runner_kwargs = cloudpickle.loads(
        {{ serialized_runner_kwargs }}
    )

    # run until the runner's goal is reached with the chosen executor
    runner = adaptive.Runner(learner, executor=executor, **runner_kwargs)

    # periodically save the data (in case the job dies)
    runner.start_periodic_saving(dict(fname=fname), interval={{ save_interval }})

    # log progress info in the job output script, optional
    client_support.log_info(runner, interval={{ log_interval }})

    # block until the runner's goal is reached
    runner.ioloop.run_until_complete(runner.task)

    # save once more after the runner is done
    learner.save(fname)

    # tell the database that this learner has reached its goal
    client_support.tell_done(url, fname)

    # log once more after the runner is done
    client_support.log_info(runner, interval=0)
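For context, rendering this template into a concrete run script might look roughly like the sketch below. Every value passed to `render()` here is a made-up example; the real defaults, file names, and the exact call site live in `_make_default_run_script()`.

from pathlib import Path

import cloudpickle
import jinja2

# Sketch only: read the template from a repository checkout.
template = jinja2.Template(Path("adaptive_scheduler/run_script.py.j2").read_text())

# Example runner kwargs; cloudpickle can serialize the lambda goal.
serialized = cloudpickle.dumps(dict(goal=lambda learner: learner.npoints >= 1000))

run_script = template.render(
    run_script_fname="run_learner.py",      # hypothetical output file name
    executor_type="mpi4py",                 # picks the MPIPoolExecutor branch
    url="tcp://10.0.0.1:37371",             # made-up database-manager address
    serialized_runner_kwargs=serialized,    # bytes render as a b"..." literal
    save_interval=300,                      # example: save every 5 minutes
    log_interval=60,                        # example: log every minute
)
Path("run_learner.py").write_text(run_script)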
requirements.txt
@@ -2,6 +2,7 @@ adaptive
 cloudpickle
 dill
 ipyparallel
+jinja2
 loky
 mpi4py
 pandas