
Updated CircleCI configuration #116

Merged 7 commits on Mar 3, 2020
124 changes: 124 additions & 0 deletions .circleci/build_artifacts.py
@@ -0,0 +1,124 @@
#!/usr/bin/env python

import logging
import json
import os
import subprocess
import shutil
import sys
import tarfile
import tempfile
import time
import types
import typing

from datetime import datetime

from junitparser import TestCase, TestSuite, JUnitXml, Skipped, Error

root = logging.getLogger()
root.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
root.addHandler(handler)

logger = logging.getLogger(__file__)

IPYDB_REQUIRED_FILES: typing.List[str] = ['requirements.txt']
ENCODING: str = 'utf-8'
ARTIFACT_DEST_DIR: str = '/tmp/artifacts'
TEST_OUTPUT_DIR: str = '/tmp/test-results'
BUILD_STATE: typing.Dict[str, typing.Any] = {}
if not os.path.exists(TEST_OUTPUT_DIR):
    os.makedirs(TEST_OUTPUT_DIR)
TEST_CASES: typing.List[TestCase] = []

def run_command(cmd: typing.List[str]) -> types.GeneratorType:
    proc = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    while proc.poll() is None:
        time.sleep(.1)

    if proc.poll() > 0:
        yield proc.poll(), proc.stderr.read().decode(ENCODING)

    elif proc.poll() is not None:
        yield proc.poll(), proc.stdout.read().decode(ENCODING)

    else:
        # if proc.poll() is None, it's still running the subprocess.
        # block until done
        pass


def find_artifacts(start_dir: str) -> types.GeneratorType:
    for root, dirnames, filenames in os.walk(start_dir):
        for filename in filenames:
            if filename.endswith('.tar.gz'):
                yield os.path.join(start_dir, filename)

for artifact_path in find_artifacts(ARTIFACT_DEST_DIR):
Collaborator

Probably the below should be an if __name__ == '__main__' block? Not critical, but might be useful for porting to nbpages (see general comment)

Collaborator

Or alternatively, make this a main function, and have an if __name__ == '__main__' that does command-line parsing.
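
A minimal sketch of that layout, with a hypothetical main() signature and --artifact-dir flag used purely for illustration:

import argparse

def main(artifact_dir: str) -> None:
    # per-artifact build loop, i.e. the body that currently runs at module level
    for artifact_path in find_artifacts(artifact_dir):
        ...

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Build notebook artifacts')
    parser.add_argument('--artifact-dir', default=ARTIFACT_DEST_DIR)
    args = parser.parse_args()
    main(args.artifact_dir)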

    logger.info(f'Found Artifact in path[{artifact_path}]. Building Artifact')
    notebook_name: str = os.path.basename(artifact_path).rsplit('.', 1)[0]
    extraction_path: str = tempfile.mkdtemp(prefix=notebook_name)
    build_script_path: str = None
    with tarfile.open(artifact_path, "r:gz") as tar:
        for member in tar.getmembers():
            if member.isdir():
                dir_path: str = os.path.join(extraction_path, member.path)
                os.makedirs(dir_path)

            elif member.isfile():
                filepath: str = os.path.join(extraction_path, member.path)
                with open(filepath, 'wb') as stream:
                    stream.write(tar.extractfile(member).read())

                if os.path.basename(member.path) == 'build.sh':
                    build_script_path = filepath

            else:
                raise NotImplementedError


    owd: str = os.getcwd()
    build_dir: str = os.path.dirname(build_script_path)
    logger.info(f'Changing to build_dir[{build_dir}]')
    os.chdir(build_dir)
    BUILD_STATE[notebook_name] = {'stdout': [], 'stderr': []}
    start = datetime.utcnow()
    for return_code, comm in run_command(['bash', 'build.sh']):
        if return_code > 0:
            logger.error(comm)
            BUILD_STATE[notebook_name]['exit-code'] = return_code
            BUILD_STATE[notebook_name]['stderr'].append(comm)

        else:
            BUILD_STATE[notebook_name]['exit-code'] = return_code
            BUILD_STATE[notebook_name]['stdout'].append(comm)
            logger.info(comm)

    delta = datetime.utcnow() - start
    logger.info(f'Changing back to old working dir[{owd}]')
    os.chdir(owd)
    test_case = TestCase(f'{notebook_name} Test')
    if BUILD_STATE[notebook_name]['exit-code'] > 0:
        test_case.result = Error('\n'.join(BUILD_STATE[notebook_name]['stderr']), BUILD_STATE[notebook_name]['exit-code'])

    TEST_CASES.append(test_case)

test_suite = TestSuite('Notebooks Test Suite')
for case in TEST_CASES:
    test_suite.add_testcase(case)
test_output_path: str = os.path.join(TEST_OUTPUT_DIR, 'results.xml')
xml = JUnitXml()
xml.add_testsuite(test_suite)
xml.write(test_output_path)

# from nbpages import make_parser, run_parsed, make_html_index
Collaborator

Is this left-over from a previous attempt, or is it intentional that there's a commented-out part here? (and if the latter, maybe add a comment above explaining why)

Contributor Author

this logic was moved to make_pages.py; I failed to remove the comments; fixed

#
# args = make_parser().parse_args()
#
# converted = run_parsed('.', output_type='HTML', args=args)
#
# converted = [item for item in converted if not os.path.basename(item) in ['test-fail.html', 'test-succeed.html']]
# make_html_index(converted, './index.tpl')

93 changes: 17 additions & 76 deletions .circleci/config.yml
@@ -7,107 +7,48 @@ executors:
    working_directory: ~/repo

jobs:
  build_and_check:
  Build_Notebooks:
    executor: notebook-executor
    steps:

      - checkout

      - restore_cache:
          keys:
            - v1-env-{{ checksum "environment.yml" }}

      - run:
          name: Setup Environment
          command: |
            ./.circleci/setup_env.sh

      - save_cache:
          key: v1-env-{{ checksum "environment.yml" }}
          paths:
            - /opt/conda

      - run:
          name: Run/convert notebooks
          name: Create Artifacts
          command: |
            conda info --envs
            source activate notebooks_env
            conda info --envs
            mkdir test-results
            python ./convert.py --report test-results/results.xml
          no_output_timeout: 20m
            python ./.circleci/create_artifacts.py

      - store_test_results:
          path: test-results
      - store_artifacts:
          path: /tmp/artifacts

      - run:
          name: Check notebooks
          name: Build Artifacts
          command: |
            COMMIT_RANGE=$(echo "${CIRCLE_COMPARE_URL}" | cut -d/ -f7)
            if [[ $COMMIT_RANGE != *"..."* ]]; then
              COMMIT_RANGE="${COMMIT_RANGE}...${COMMIT_RANGE}"
            fi
            source activate notebooks_env
            conda info --envs
            python -m "nbpages.check_nbs" --commit-range=${COMMIT_RANGE}
            python ./.circleci/build_artifacts.py

      - persist_to_workspace:
          root: ~/
          paths:
            - repo

      - store_artifacts:
          path: ~/repo
      - store_test_results:
          path: /tmp/test-results

      - run:
          name: "Built notebooks are available at:"
          command: NB_URL="${CIRCLE_BUILD_URL}/artifacts/${CIRCLE_NODE_INDEX}/${CIRCLE_WORKING_DIRECTORY/#\~/$HOME}/index.html"; echo $NB_URL


  deploy:
    executor: notebook-executor
    environment:
      - DEPLOY_BRANCH: gh-pages
    steps:
      - attach_workspace:
          at: ~/
          name: Make Pages
          command: |
            source activate notebooks_env
            python ./.circleci/make_pages.py

      - add_ssh_keys

      # clone gh-pages branch, copy over files from build, commit and push
      # export and sources are needed before script because CircleCI does not support
      # interpolation when setting environment variables
      # i.e. can't set environment vars to other environment vars (for use in script)
      # see https://circleci.com/docs/2.0/env-vars/#setting-an-environment-variable-in-a-shell-command
      - deploy:
          name: Deploy
      - run:
          name: Deploy Pages
          command: |
            if [[ -z ${CIRCLE_PULL_REQUEST} ]]; then
              echo "export WORKSPACE=${CIRCLE_WORKING_DIRECTORY}" >> $BASH_ENV
              echo "export REPO_URL=${CIRCLE_REPOSITORY_URL}" >> $BASH_ENV
              source $BASH_ENV
              git config --global user.email $GH_EMAIL
              git config --global user.name $GH_NAME
              echo "Host * " >> ~/.ssh/config
              echo " StrictHostKeyChecking no" >> ~/.ssh/config
              ./gh_pages_deploy.sh
            else
              echo "PR, not deploying..."
              echo "${CIRCLE_PULL_REQUEST}"
              echo "${CIRCLE_PULL_NUMBER}"
            fi
            bash ./.circleci/deploy-notebooks.sh


workflows:
  version: 2.1

  build-deploy:
    jobs:
      - build_and_check
      - deploy:
          requires:
            - build_and_check
          filters:
            branches:
              only:
                - master
      - Build_Notebooks
78 changes: 78 additions & 0 deletions .circleci/create_artifacts.py
@@ -0,0 +1,78 @@
#!/usr/bin/env python

import logging
import os
import shutil
import sys
import tarfile
import tempfile
import types
import typing

root = logging.getLogger()
root.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
root.addHandler(handler)

logger = logging.getLogger(__file__)

IPYDB_REQUIRED_FILES: typing.List[str] = ['requirements.txt']
ENCODING: str = 'utf-8'
ARTIFACT_DEST_DIR: str = '/tmp/artifacts'

def find_ipynb_files(start_path: str) -> types.GeneratorType:
    for root, dirnames, filenames in os.walk(start_path):
        is_ipydb_directory: bool = False
        for filename in filenames:
            if filename.endswith('.ipynb'):
                is_ipydb_directory = True
                break

        if is_ipydb_directory:
            has_error: bool = False
            for filename in IPYDB_REQUIRED_FILES:
                if filename not in filenames:
                    logger.error(f'Missing file[{filename}] in dir[{os.path.relpath(root)}]')
                    has_error = True

            if has_error is False:
                yield os.path.abspath(root)

for notebook_path in find_ipynb_files(os.getcwd()):
    logger.info(f'Found notebook in path[{os.path.relpath(notebook_path)}]. Building Artifact')
    notebook_name: str = os.path.basename(notebook_path)
    notebook_name_plain: str = notebook_name.rsplit('.', 1)[0]
    build_path = tempfile.mkdtemp(prefix=notebook_name)
    shutil.rmtree(build_path)
    build_script_path = os.path.join(build_path, 'build.sh')
    shutil.copytree(notebook_path, build_path)
    setup_script: str = f"""#!/usr/bin/env bash
set -e
cd {build_path}
source activate notebooks_env
virtualenv -p $(which python3) env
conda deactivate
source env/bin/activate
pip install -r requirements.txt
pip install jupyter
jupyter nbconvert --stdout --to html {notebook_name} > {notebook_name_plain}.html
cd -
"""
    with open(build_script_path, 'w') as stream:
        stream.write(setup_script)

    logger.info(f'Taring Notebook[{notebook_name}]')
    artifact_name: str = f'{notebook_name_plain}.tar.gz'
    artifact_dir_path: str = os.path.dirname(tempfile.NamedTemporaryFile().name)
    artifact_path: str = os.path.join(artifact_dir_path, artifact_name)
    with tarfile.open(artifact_path, "w:gz") as tar:
        tar.add(build_path, arcname=os.path.basename(build_path))

    if not os.path.exists(ARTIFACT_DEST_DIR):
        os.makedirs(ARTIFACT_DEST_DIR)

    artifact_dest: str = os.path.join(ARTIFACT_DEST_DIR, artifact_name)
    logger.info(f'Moving Notebook[{notebook_name_plain}]')
    shutil.move(artifact_path, artifact_dest)

19 changes: 19 additions & 0 deletions .circleci/deploy-notebooks.sh
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

set -e
# Only runs on master
if [ -z "${CIRCLE_PULL_REQUEST}" ]; then
    git config --global user.email devnull@circleci.com
    git config --global user.name CircleCI
    mkdir -p ~/.ssh
    echo 'Host * ' >> ~/.ssh/config
    echo ' StrictHostKeyChecking no' >> ~/.ssh/config
    # Deploy gh-pages
    git clone -b gh-pages --single-branch ${CIRCLE_REPOSITORY_URL} /tmp/out
    cd /tmp/out
    git add .
    git commit -m "Automated deployment to Github Pages: ${BUILD_TAG}" -a || true
    git push origin gh-pages
    git clean -dfx
fi
exit 0
23 changes: 23 additions & 0 deletions .circleci/make_pages.py
@@ -0,0 +1,23 @@
#!/usr/bin/env python

import logging
import os
import sys

from nbpages import make_parser, run_parsed, make_html_index

root = logging.getLogger()
root.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
root.addHandler(handler)

logger = logging.getLogger(__file__)

args = make_parser().parse_args()
logger.info('Converting notebooks into HTML')
converted = run_parsed('.', output_type='HTML', args=args)
# converted = [item for item in converted if not os.path.basename(item) in ['test-fail.html', 'test-succeed.html']]

logger.info('Creating HTML Index')
make_html_index(converted, './index.tpl')
14 changes: 6 additions & 8 deletions .circleci/setup_env.sh
@@ -1,10 +1,8 @@
#!/bin/bash

if [[ ! -d /opt/conda/envs/notebooks_env ]]; then
    conda info --envs
    conda env update --file=environment.yml
    source activate notebooks_env
    conda info --envs
else
    echo "Using cached miniconda environment";
fi
apt-get update
apt-get install build-essential gcc-4.8 -y
conda info --envs
conda env update --file=environment.yml
source activate notebooks_env
conda info --envs
4 changes: 4 additions & 0 deletions .gitignore
@@ -1,2 +1,6 @@
**/*.ipynb_checkpoints/
*.fits
*.swp
src
index.html
env