Skip to content

Commit

Permalink
More logging, README instruction for passing .env
Browse files Browse the repository at this point in the history
  • Loading branch information
nolan1999 committed Oct 27, 2024
1 parent 1060464 commit ca20bbc
Show file tree
Hide file tree
Showing 4 changed files with 170 additions and 4 deletions.
160 changes: 160 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,160 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a Python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
6 changes: 3 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
# see README.md for usage
FROM python:3.10

WORKDIR .
WORKDIR /app

COPY requirements.txt .
COPY requirements.txt /app
RUN pip install -r requirements.txt

COPY . .
COPY . /app

CMD ["python", "-m", "cli.main"]
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ Copy the `.env.example` file to a `.env` file at the root of the directory and d
#### Install the nvidia-container-toolkit
[See here](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) to run GPU jobs
#### Run the Docker container
`docker run --gpus '"device=0"' -v <PATH_HOST_BASE>:<PATH_BASE> -v /var/run/docker.sock:/var/run/docker.sock --add-host=host.docker.internal:host-gateway jobfetcher:latest`, where:
`docker run --env-file .env --gpus '"device=0"' -v <PATH_HOST_BASE>:<PATH_BASE> -v /var/run/docker.sock:/var/run/docker.sock --add-host=host.docker.internal:host-gateway jobfetcher:latest`, where:
- `<PATH_HOST_BASE>` and `<PATH_BASE>` are set as above.
- The `--add-host=host.docker.internal:host-gateway` option is required only when running on Linux (`jobfetcher:latest` is the image name and is always required).
- The `--gpus '"device=0"'` option specifies which GPUs the worker should be able to use. `--gpus all` selects all GPUs, but you typically want to select which GPU to reserve, and if you have many, run multiple workers each with one reserved GPU.
Expand Down
6 changes: 6 additions & 0 deletions cli/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@
raise ValueError(f"Expected only one job, got {len(jobs)}")

job_id, job = jobs.popitem()
logger.info(f"Pulled job {job_id}.")
api_job = api.worker.JobAPI(job_id, api_worker)

pinger_pre = status.pinger.ParallelPinger(
Expand All @@ -67,6 +68,7 @@
timeout=TIMEOUT_STATUS,
)
pinger_pre.start()
logger.info(f"Preprocessing job {job_id}")

path_job = path_base / job_id

Expand Down Expand Up @@ -124,9 +126,12 @@
ping=status.status.DockerStatus(container, ping=api_job.ping).ping,
timeout=TIMEOUT_STATUS,
)
logger.info(f"Running job {job_id}")
pinger_run.start()
pinger_run.stop()
res = container.wait()
logger.info(f"Job {job_id} finished with exit code {res['StatusCode']}")
logger.info(f"Postprocessing job {job_id}")

# upload result
pinger_post = status.pinger.ParallelPinger(
Expand Down Expand Up @@ -158,4 +163,5 @@
else:
raise e

logger.info(f"Job {job_id} finished")
shutil.rmtree(path_job)

0 comments on commit ca20bbc

Please sign in to comment.