-
Notifications
You must be signed in to change notification settings - Fork 164
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Updated CircleCI configuration to build each notebook+requirements for each notebook. Also added TestCase handling that'll report when errors are found with the pip install -r requirements.txt.
- Loading branch information
1 parent
d28a85c
commit b92fb5c
Showing
8 changed files
with
293 additions
and
104 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,124 @@ | ||
#!/usr/bin/env python | ||
|
||
import logging | ||
import json | ||
import os | ||
import subprocess | ||
import shutil | ||
import sys | ||
import tarfile | ||
import tempfile | ||
import time | ||
import types | ||
import typing | ||
|
||
from datetime import datetime | ||
|
||
from junitparser import TestCase, TestSuite, JUnitXml, Skipped, Error | ||
|
||
# Route all log records to stdout so the CI job log captures them in order.
root = logging.getLogger()
root.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
root.addHandler(handler)

logger = logging.getLogger(__file__)

# Files every notebook directory must ship with before it can be built.
IPYDB_REQUIRED_FILES: typing.List[str] = ['requirements.txt']
ENCODING: str = 'utf-8'
# Where build-artifact tarballs are read from, and where the JUnit XML goes.
ARTIFACT_DEST_DIR: str = '/tmp/artifacts'
TEST_OUTPUT_DIR: str = '/tmp/test-results'
# Per-notebook build results: {'stdout': [...], 'stderr': [...], 'exit-code': int}.
BUILD_STATE: typing.Dict[str, typing.Any] = {}
if not os.path.exists(TEST_OUTPUT_DIR):
    os.makedirs(TEST_OUTPUT_DIR)
TEST_CASES: typing.List[TestCase] = []
|
||
def run_command(cmd: typing.List[str]) -> types.GeneratorType:
    """Run *cmd* through the shell and block until it exits.

    Yields exactly one ``(exit_code, output)`` tuple: the decoded stderr
    when the command failed (exit code > 0), otherwise the decoded stdout
    (this also covers signal-terminated negative codes, matching the old
    ``elif proc.poll() != None`` branch).

    NOTE(review): ``shell=True`` with a joined string means *cmd* must be
    trusted input; fine for CI-internal commands, never for user data.
    """
    proc = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, shell=True)
    # communicate() drains both pipes while waiting, fixing the pipe-buffer
    # deadlock the old poll()/sleep busy-wait risked on chatty commands,
    # and removes the `!= None` comparison and unreachable else branch.
    stdout, stderr = proc.communicate()
    if proc.returncode > 0:
        yield proc.returncode, stderr.decode(ENCODING)
    else:
        yield proc.returncode, stdout.decode(ENCODING)
|
||
|
||
def find_artifacts(start_dir: str) -> types.GeneratorType:
    """Recursively yield the path of every ``*.tar.gz`` file under *start_dir*.

    Bug fix: paths are joined with the directory the file was actually found
    in (``dirpath``), not *start_dir* — the old code produced wrong paths for
    artifacts inside nested subdirectories. Renaming the loop variable also
    stops shadowing the module-level ``root`` logger.
    """
    for dirpath, _dirnames, filenames in os.walk(start_dir):
        for filename in filenames:
            if filename.endswith('.tar.gz'):
                yield os.path.join(dirpath, filename)
|
||
# Unpack each artifact tarball, run its build.sh, and record the result as a
# JUnit test case so CI surfaces per-notebook failures.
for artifact_path in find_artifacts(ARTIFACT_DEST_DIR):
    logger.info(f'Found Artifact in path[{artifact_path}]. Building Artifact')
    # NOTE(review): rsplit('.', 1) only strips '.gz', so 'name.tar.gz' yields
    # 'name.tar' — confirm that is the intended display name.
    notebook_name: str = os.path.basename(artifact_path).rsplit('.', 1)[0]
    extraction_path: str = tempfile.mkdtemp(prefix=notebook_name)
    build_script_path: typing.Optional[str] = None
    with tarfile.open(artifact_path, "r:gz") as tar:
        for member in tar.getmembers():
            if member.isdir():
                dir_path: str = os.path.join(extraction_path, member.path)
                os.makedirs(dir_path)

            elif member.isfile():
                filepath: str = os.path.join(extraction_path, member.path)
                with open(filepath, 'wb') as stream:
                    stream.write(tar.extractfile(member).read())

                # Remember the build entry point for this artifact.
                if os.path.basename(member.path) == 'build.sh':
                    build_script_path = filepath

            else:
                # Symlinks/devices/etc. are unexpected in build artifacts.
                raise NotImplementedError

    owd: str = os.getcwd()
    # build.sh assumes it runs from its own directory, so chdir around it.
    build_dir: str = os.path.dirname(build_script_path)
    logger.info(f'Changing to build_dir[{build_dir}]')
    os.chdir(build_dir)
    BUILD_STATE[notebook_name] = {'stdout': [], 'stderr': []}
    start = datetime.utcnow()
    for return_code, comm in run_command(['bash', 'build.sh']):
        if return_code > 0:
            logger.error(comm)
            BUILD_STATE[notebook_name]['exit-code'] = return_code
            BUILD_STATE[notebook_name]['stderr'].append(comm)

        else:
            BUILD_STATE[notebook_name]['exit-code'] = return_code
            BUILD_STATE[notebook_name]['stdout'].append(comm)
            logger.info(comm)

    # Previously `delta` was computed but never used; log the build duration.
    delta = datetime.utcnow() - start
    logger.info(f'Build of [{notebook_name}] took {delta}')
    logger.info(f'Changing back to old working dir[{owd}]')
    os.chdir(owd)
    test_case = TestCase(f'{notebook_name} Test')
    if BUILD_STATE[notebook_name]['exit-code'] > 0:
        test_case.result = Error('\n'.join(BUILD_STATE[notebook_name]['stderr']), BUILD_STATE[notebook_name]['exit-code'])
        # Bug fix: the failing branch used to append test_case here AND again
        # below, double-counting every failed notebook in the report.

    TEST_CASES.append(test_case)
|
||
# Emit a single JUnit XML report that CircleCI's store_test_results step reads.
test_suite = TestSuite('Notebooks Test Suite')
# Idiom fix: plain loop instead of a side-effect-only list comprehension,
# and plain strings instead of placeholder-free f-strings.
for case in TEST_CASES:
    test_suite.add_testcase(case)

test_output_path: str = os.path.join(TEST_OUTPUT_DIR, 'results.xml')
xml = JUnitXml()
xml.add_testsuite(test_suite)
xml.write(test_output_path)
|
||
# from nbpages import make_parser, run_parsed, make_html_index | ||
# | ||
# args = make_parser().parse_args() | ||
# | ||
# converted = run_parsed('.', output_type='HTML', args=args) | ||
# | ||
# converted = [item for item in converted if not os.path.basename(item) in ['test-fail.html', 'test-succeed.html']] | ||
# make_html_index(converted, './index.tpl') | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,78 @@ | ||
#!/usr/bin/env python | ||
|
||
import logging | ||
import os | ||
import shutil | ||
import sys | ||
import tarfile | ||
import tempfile | ||
import types | ||
import typing | ||
|
||
# Route all log records to stdout so the CI job log captures them in order.
root = logging.getLogger()
root.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
root.addHandler(handler)

logger = logging.getLogger(__file__)

# Files a notebook directory must contain to be considered buildable.
IPYDB_REQUIRED_FILES: typing.List[str] = ['requirements.txt']
ENCODING: str = 'utf-8'
# Destination directory for the generated notebook tarballs.
ARTIFACT_DEST_DIR: str = '/tmp/artifacts'
|
||
def find_ipynb_files(start_path: str) -> types.GeneratorType:
    """Yield absolute paths of directories under *start_path* that contain at
    least one ``*.ipynb`` notebook plus every file in IPYDB_REQUIRED_FILES.

    Directories with a notebook but missing a required file are logged
    (one error per missing file, as before) and skipped.
    """
    for dirpath, _dirnames, filenames in os.walk(start_path):
        # A directory only qualifies if it holds at least one notebook.
        if not any(name.endswith('.ipynb') for name in filenames):
            continue

        missing: typing.List[str] = [
            required for required in IPYDB_REQUIRED_FILES
            if required not in filenames
        ]
        for required in missing:
            # Bug fix: the old message printed '(unknown)' and never said
            # WHICH required file was absent.
            logger.error(f'Missing file[{required}] in dir[{os.path.relpath(dirpath)}]')

        if not missing:
            yield os.path.abspath(dirpath)
|
||
# Package every buildable notebook directory under CWD into a tarball that the
# downstream build job extracts and executes via the generated build.sh.
for notebook_path in find_ipynb_files(os.getcwd()):
    logger.info(f'Found notebook in path[{os.path.relpath(notebook_path)}]. Building Artifact')
    # notebook_path is a DIRECTORY; its basename doubles as the notebook name.
    # NOTE(review): nbconvert below is handed this same name as a file —
    # presumably directory names look like '<name>.ipynb'; confirm.
    notebook_name: str = os.path.basename(notebook_path)
    notebook_name_plain: str = notebook_name.rsplit('.', 1)[0]
    # mkdtemp reserves a unique path, then rmtree frees it so copytree
    # (which requires a non-existent destination) can recreate it.
    build_path = tempfile.mkdtemp(prefix=notebook_name)
    shutil.rmtree(build_path)
    build_script_path = os.path.join(build_path, 'build.sh')
    shutil.copytree(notebook_path, build_path)
    # Build script: leave conda, make a fresh virtualenv, install the
    # notebook's requirements, and render the notebook to HTML.
    setup_script: str = f"""#!/usr/bin/env bash
set -e
cd {build_path}
source activate notebooks_env
virtualenv -p $(which python3) env
conda deactivate
source env/bin/activate
pip install -r requirements.txt
pip install jupyter
jupyter nbconvert --stdout --to html {notebook_name} > {notebook_name_plain}.html
cd -
"""
    with open(build_script_path, 'w') as stream:
        stream.write(setup_script)

    logger.info(f'Taring Notebook[{notebook_name}]')
    artifact_name: str = f'{notebook_name_plain}.tar.gz'
    # NamedTemporaryFile().name's dirname is just a way to locate the
    # platform temp directory for staging the tarball.
    artifact_dir_path: str = os.path.dirname(tempfile.NamedTemporaryFile().name)
    artifact_path: str = os.path.join(artifact_dir_path, artifact_name)
    with tarfile.open(artifact_path, "w:gz") as tar:
        tar.add(build_path, arcname=os.path.basename(build_path))

    if not os.path.exists(ARTIFACT_DEST_DIR):
        os.makedirs(ARTIFACT_DEST_DIR)

    # Move the finished tarball where the build job expects to find it.
    artifact_dest: str = os.path.join(ARTIFACT_DEST_DIR, artifact_name)
    logger.info(f'Moving Notebook[{notebook_name_plain}]')
    shutil.move(artifact_path, artifact_dest)
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
#!/usr/bin/env bash

set -e
# Only runs on master: CircleCI leaves CIRCLE_PULL_REQUEST unset for non-PR builds.
if [ -z "${CIRCLE_PULL_REQUEST}" ]; then
    git config --global user.email devnull@circleci.com
    git config --global user.name CircleCI
    mkdir -p ~/.ssh
    echo 'Host * ' >> ~/.ssh/config
    echo ' StrictHostKeyChecking no' >> ~/.ssh/config
    # Deploy gh-pages
    git clone -b gh-pages --single-branch ${CIRCLE_REPOSITORY_URL} /tmp/out
    cd /tmp/out
    git add .
    # Bug fix: double quotes so ${BUILD_TAG} actually expands — single quotes
    # committed the literal text '${BUILD_TAG}'. '|| true' keeps a no-change
    # commit from killing the job under 'set -e'.
    git commit -m "Automated deployment to Github Pages: ${BUILD_TAG}" -a || true
    git push origin gh-pages
    git clean -dfx
fi
exit 0
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
#!/usr/bin/env python | ||
|
||
import logging | ||
import os | ||
import sys | ||
|
||
from nbpages import make_parser, run_parsed, make_html_index | ||
|
||
# Route all log records to stdout so the CI job log captures them in order.
root = logging.getLogger()
root.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
root.addHandler(handler)

logger = logging.getLogger(__file__)

# Convert every notebook under the current directory to HTML with nbpages,
# then generate an index page linking the converted files.
args = make_parser().parse_args()
logger.info('Converting notebooks into HTML')
converted = run_parsed('.', output_type='HTML', args=args)
# converted = [item for item in converted if not os.path.basename(item) in ['test-fail.html', 'test-succeed.html']]

logger.info('Creating HTML Index')
make_html_index(converted, './index.tpl')
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,10 +1,8 @@ | ||
#!/bin/bash

# NOTE(review): this span appears to be diff residue showing BOTH the old
# cached-environment branch (the if/else) and the new unconditional setup
# (the lines after 'fi') — confirm which version is current before editing.
if [[ ! -d /opt/conda/envs/notebooks_env ]]; then
    conda info --envs
    conda env update --file=environment.yml
    source activate notebooks_env
    conda info --envs
else
    echo "Using cached miniconda environment";
fi
# Build tools needed so pip can compile native wheels during installs.
apt-get update
apt-get install build-essential gcc-4.8 -y
conda info --envs
conda env update --file=environment.yml
source activate notebooks_env
conda info --envs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,2 +1,6 @@ | ||
**/*.ipynb_checkpoints/ | ||
*.fits | ||
*.swp | ||
src | ||
index.html | ||
env |
Oops, something went wrong.