From 5411932f8d0c29ef65c657b441dc2fda28779bf0 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Wed, 21 Apr 2021 09:18:59 -0400 Subject: [PATCH 01/15] First cut at github actions --- .github/workflows/unit-tests_and_docs.yml | 96 +++++++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 .github/workflows/unit-tests_and_docs.yml diff --git a/.github/workflows/unit-tests_and_docs.yml b/.github/workflows/unit-tests_and_docs.yml new file mode 100644 index 00000000..1d9cb687 --- /dev/null +++ b/.github/workflows/unit-tests_and_docs.yml @@ -0,0 +1,96 @@ +name: Run unit tests and build docs + +on: + # Triggers the workflow on push or pull request events but only for the main branch + # Remove push when finally merging. + push: + branches: [ main ] + pull_request: + branches: [ main ] + + # Allows you to run this workflow manually from the Actions tab. + workflow_dispatch: + +jobs: + # This job is called test_docs. + test_docs: + # Run on Ubuntu + runs-on: ubuntu-latest + + steps: + + # TEMPORARY WHILE GITHUB FIXES THIS https://github.com/actions/virtual-environments/issues/3185 + - name: Add the current IP address, long hostname and short hostname record to /etc/hosts file + run: | + echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it. + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: 3.8 + + - name: Install OpenMDAO Stack + run: | + echo "============================================================="; + echo "Run #${GITHUB_RUN_NUMBER}"; + echo "Run ID: ${GITHUB_RUN_ID}"; + echo "Testing: ${GITHUB_REPOSITORY}"; + echo "Triggered by: ${GITHUB_EVENT_NAME}"; + echo "Initiated by: ${GITHUB_ACTOR}"; + echo "============================================================="; + echo "============================================================="; + echo "Create conda environment"; + echo "============================================================="; + source $CONDA/etc/profile.d/conda.sh; + echo $CONDA/bin >> $GITHUB_PATH; + conda create -n OpenMDAO python=3.8 numpy=1.18 scipy=1.4 -q -y; + conda activate OpenMDAO; + echo "============================================================="; + echo "Install PETSc"; + echo "============================================================="; + conda install -c anaconda mpi4py -q -y; + conda install -c conda-forge petsc=3.12 petsc4py -q -y; + echo "============================================================="; + echo "Install pyoptsparse"; + echo "============================================================="; + git clone -q https://github.com/OpenMDAO/build_pyoptsparse; + cd build_pyoptsparse; + chmod 755 ./build_pyoptsparse.sh; + ./build_pyoptsparse.sh -b "v2.1.5"; + cd ..; + export LD_LIBRARY_PATH=$HOME/ipopt/lib; + echo "============================================================="; + echo "Install OpenMDAO"; + echo "============================================================="; + cd ..; + git clone -q https://github.com/OpenMDAO/OpenMDAO; + cd OpenMDAO; + pip install .[all]; + pip install testflo; + cd ../mphys; + pip install -e . 
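+          # '-e' installs the checked-out mphys source in editable mode, so the tests
+          # and docs below run against this working copy rather than a copied install.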
+ echo "============================================================="; + echo "List installed packages/versions"; + echo "============================================================="; + conda list; + echo "============================================================="; + echo "Run the tests."; + echo "============================================================="; + cd tests/unit_tests + testflo -n 1 + echo "============================================================="; + echo "Build the docs."; + echo "============================================================="; + cd ../../docs + make html + cd _build/html + zip -r ./mphys.zip . + echo "============================================================="; + echo "Operations Completed."; + echo "============================================================="; + - name: 'Upload Docs' + uses: actions/upload-artifact@v2 + with: + name: mphys + path: docs/_build/html/mphys.zip + retention-days: 7 From c0f624971bb6d2b3c99db3dfeb3065d48578c238 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 12:05:24 -0400 Subject: [PATCH 02/15] Small changes to ci yml --- .../{unit-tests_and_docs.yml => unit_tests_and_docs.yml} | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) rename .github/workflows/{unit-tests_and_docs.yml => unit_tests_and_docs.yml} (89%) diff --git a/.github/workflows/unit-tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml similarity index 89% rename from .github/workflows/unit-tests_and_docs.yml rename to .github/workflows/unit_tests_and_docs.yml index 1d9cb687..35e659b0 100644 --- a/.github/workflows/unit-tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -1,4 +1,4 @@ -name: Run unit tests and build docs +name: Unit tests and docs on: # Triggers the workflow on push or pull request events but only for the main branch @@ -19,10 +19,6 @@ jobs: steps: - # TEMPORARY WHILE GITHUB FIXES THIS https://github.com/actions/virtual-environments/issues/3185 - - name: Add the current IP address, long hostname and short hostname record to /etc/hosts file - run: | - echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it. 
- uses: actions/checkout@v2 - uses: actions/setup-python@v2 @@ -77,7 +73,7 @@ jobs: echo "Run the tests."; echo "============================================================="; cd tests/unit_tests - testflo -n 1 + testflo echo "============================================================="; echo "Build the docs."; echo "============================================================="; From 21ee3b8aac6c82a38993dc34df947f8221403a2c Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 12:55:50 -0400 Subject: [PATCH 03/15] Add sphinx utilities that no longer live in OM --- docs/_exts/embed_code.py | 2 +- docs/_exts/embed_compare.py | 2 +- docs/conf.py | 7 +- mphys/doc_utils/_utils/__init__.py | 0 mphys/doc_utils/_utils/docutil.py | 907 ++++++++++++++++++ mphys/doc_utils/_utils/generate_sourcedocs.py | 162 ++++ mphys/doc_utils/_utils/patch.py | 138 +++ mphys/doc_utils/_utils/preprocess_tags.py | 95 ++ mphys/doc_utils/_utils/run_sub.py | 41 + mphys/doc_utils/_utils/upload_doc_version.py | 99 ++ mphys/doc_utils/config_params.py | 4 + 11 files changed, 1451 insertions(+), 6 deletions(-) create mode 100644 mphys/doc_utils/_utils/__init__.py create mode 100644 mphys/doc_utils/_utils/docutil.py create mode 100644 mphys/doc_utils/_utils/generate_sourcedocs.py create mode 100644 mphys/doc_utils/_utils/patch.py create mode 100644 mphys/doc_utils/_utils/preprocess_tags.py create mode 100644 mphys/doc_utils/_utils/run_sub.py create mode 100644 mphys/doc_utils/_utils/upload_doc_version.py create mode 100644 mphys/doc_utils/config_params.py diff --git a/docs/_exts/embed_code.py b/docs/_exts/embed_code.py index 018eef2d..0e005d30 100644 --- a/docs/_exts/embed_code.py +++ b/docs/_exts/embed_code.py @@ -10,7 +10,7 @@ from docutils.parsers.rst.directives import unchanged, images -from openmdao.docs._utils.docutil import get_source_code, remove_docstrings, \ +from mphys.doc_utils._utils.docutil import get_source_code, remove_docstrings, \ remove_initial_empty_lines, replace_asserts_with_prints, \ strip_header, dedent, insert_output_start_stop_indicators, run_code, \ get_skip_output_node, get_interleaved_io_nodes, get_output_block_node, \ diff --git a/docs/_exts/embed_compare.py b/docs/_exts/embed_compare.py index 49363c44..6badf216 100644 --- a/docs/_exts/embed_compare.py +++ b/docs/_exts/embed_compare.py @@ -4,7 +4,7 @@ import sphinx from docutils.parsers.rst import Directive -from openmdao.docs._utils.docutil import get_source_code +from mphys.doc_utils._utils.docutil import get_source_code class ContentContainerDirective(Directive): diff --git a/docs/conf.py b/docs/conf.py index 6cc5e6a3..0b9f62bc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -3,13 +3,12 @@ # containing dir. import sys import os -import importlib +import importlib from unittest.mock import Mock -from openmdao.docs.config_params import MOCK_MODULES -from openmdao.docs._utils.patch import do_monkeypatch -from openmdao.docs._utils.upload_doc_version import get_doc_version +from mphys.doc_utils.config_params import MOCK_MODULES +from mphys.doc_utils._utils.patch import do_monkeypatch # Only mock the ones that don't import. 
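# Any module in MOCK_MODULES that fails to import is replaced with a Mock so that
# Sphinx autodoc can import mphys without optional dependencies such as petsc4py or mpi4py.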
for mod_name in MOCK_MODULES: diff --git a/mphys/doc_utils/_utils/__init__.py b/mphys/doc_utils/_utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/mphys/doc_utils/_utils/docutil.py b/mphys/doc_utils/_utils/docutil.py new file mode 100644 index 00000000..fe348f91 --- /dev/null +++ b/mphys/doc_utils/_utils/docutil.py @@ -0,0 +1,907 @@ +""" +A collection of functions for modifying source code that is embeded into the Sphinx documentation. +""" + +import sys +import os +import re +import tokenize +import importlib +import inspect +import subprocess +import tempfile +import unittest +import traceback +import ast + +from docutils import nodes + +from collections import namedtuple + +from io import StringIO + +from sphinx.errors import SphinxError +from sphinx.writers.html import HTMLTranslator +from sphinx.writers.html5 import HTML5Translator +from redbaron import RedBaron + +import html as cgiesc + +from openmdao.utils.general_utils import printoptions + +sqlite_file = 'feature_docs_unit_test_db.sqlite' # name of the sqlite database file +table_name = 'feature_unit_tests' # name of the table to be queried + +_sub_runner = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'run_sub.py') + + +# an input block consists of a block of code and a tag that marks the end of any +# output from that code in the output stream (via inserted print('>>>>>#') statements) +InputBlock = namedtuple('InputBlock', 'code tag') + + +class skipped_or_failed_node(nodes.Element): + pass + + +def visit_skipped_or_failed_node(self, node): + pass + + +def depart_skipped_or_failed_node(self, node): + if not isinstance(self, (HTMLTranslator, HTML5Translator)): + self.body.append("output only available for HTML\n") + return + + html = '
<div class="cell border-box-sizing code_cell rendered"><div class="output_area {}"><pre>{}</pre></div></div>
'.format(node["kind"], node["text"]) + self.body.append(html) + + +class in_or_out_node(nodes.Element): + pass + + +def visit_in_or_out_node(self, node): + pass + + +def depart_in_or_out_node(self, node): + """ + This function creates the formatting that sets up the look of the blocks. + The look of the formatting is controlled by _theme/static/style.css + """ + if not isinstance(self, (HTMLTranslator, HTML5Translator)): + self.body.append("output only available for HTML\n") + return + if node["kind"] == "In": + html = '
<div class="highlight"><pre>{}</pre></div>
'.format(node["text"]) + elif node["kind"] == "Out": + html = '
<div class="cell border-box-sizing code_cell rendered"><div class="output_area"><pre>{}</pre></div></div>
'.format(node["text"]) + + self.body.append(html) + + +def node_setup(app): + app.add_node(skipped_or_failed_node, html=(visit_skipped_or_failed_node, depart_skipped_or_failed_node)) + app.add_node(in_or_out_node, html=(visit_in_or_out_node, depart_in_or_out_node)) + + +def remove_docstrings(source): + """ + Return 'source' minus docstrings. + + Parameters + ---------- + source : str + Original source code. + + Returns + ------- + str + Source with docstrings removed. + """ + io_obj = StringIO(source) + out = "" + prev_toktype = tokenize.INDENT + last_lineno = -1 + last_col = 0 + for tok in tokenize.generate_tokens(io_obj.readline): + token_type = tok[0] + token_string = tok[1] + start_line, start_col = tok[2] + end_line, end_col = tok[3] + # ltext = tok[4] # in original code but not used here + # The following two conditionals preserve indentation. + # This is necessary because we're not using tokenize.untokenize() + # (because it spits out code with copious amounts of oddly-placed + # whitespace). + if start_line > last_lineno: + last_col = 0 + if start_col > last_col: + out += (" " * (start_col - last_col)) + # This series of conditionals removes docstrings: + if token_type == tokenize.STRING: + if prev_toktype != tokenize.INDENT: + # This is likely a docstring; double-check we're not inside an operator: + if prev_toktype != tokenize.NEWLINE: + # Note regarding NEWLINE vs NL: The tokenize module + # differentiates between newlines that start a new statement + # and newlines inside of operators such as parens, brackes, + # and curly braces. Newlines inside of operators are + # NEWLINE and newlines that start new code are NL. + # Catch whole-module docstrings: + if start_col > 0: + # Unlabelled indentation means we're inside an operator + out += token_string + # Note regarding the INDENT token: The tokenize module does + # not label indentation inside of an operator (parens, + # brackets, and curly braces) as actual indentation. + # For example: + # def foo(): + # "The spaces before this docstring are tokenize.INDENT" + # test = [ + # "The spaces before this string do not get a token" + # ] + else: + out += token_string + prev_toktype = token_type + last_col = end_col + last_lineno = end_line + return out + + +def remove_redbaron_node(node, index): + """ + Utility function for removing a node using RedBaron. + + RedBaron has some problems with modifying code lines that run across + multiple lines. ( It is mentioned somewhere online but cannot seem to + find it now. ) + + RedBaron throws an Exception but when you check, it seems like it does + what you asked it to do. So, for now, we ignore the Exception. + """ + + try: + node.value.remove(node.value[index]) + except Exception as e: # no choice but to catch the general Exception + if str(e).startswith('It appears that you have indentation in your CommaList'): + pass + else: + raise + + +def replace_asserts_with_prints(src): + """ + Replace asserts with print statements. + + Using RedBaron, replace some assert calls with print statements that print the actual + value given in the asserts. Depending on the calls, the actual value can be the first or second + argument. + + Parameters + ---------- + src : str + String containing source lines. + + Returns + ------- + str + String containing source with asserts replaced by prints. + """ + rb = RedBaron(src) # convert to RedBaron internal structure + + # findAll is slow, so only check the ones that are present. 
+ base_assert = ['assertAlmostEqual', 'assertLess', 'assertGreater', 'assertEqual', + 'assert_equal_arrays', 'assertTrue', 'assertFalse'] + used_assert = [item for item in base_assert if item in src] + + for assert_type in used_assert: + assert_nodes = rb.findAll("NameNode", value=assert_type) + for assert_node in assert_nodes: + assert_node = assert_node.parent + remove_redbaron_node(assert_node, 0) # remove 'self' from the call + assert_node.value[0].replace('print') + if assert_type not in ['assertTrue', 'assertFalse']: + # remove the expected value argument + remove_redbaron_node(assert_node.value[1], 1) + + if 'assert_rel_error' in src: + assert_nodes = rb.findAll("NameNode", value='assert_rel_error') + for assert_node in assert_nodes: + assert_node = assert_node.parent + # If relative error tolerance is specified, there are 4 arguments + if len(assert_node.value[1]) == 4: + # remove the relative error tolerance + remove_redbaron_node(assert_node.value[1], -1) + remove_redbaron_node(assert_node.value[1], -1) # remove the expected value + # remove the first argument which is the TestCase + remove_redbaron_node(assert_node.value[1], 0) + # + assert_node.value[0].replace("print") + + if 'assert_near_equal' in src: + assert_nodes = rb.findAll("NameNode", value='assert_near_equal') + for assert_node in assert_nodes: + assert_node = assert_node.parent + # If relative error tolerance is specified, there are 3 arguments + if len(assert_node.value[1]) == 3: + # remove the relative error tolerance + remove_redbaron_node(assert_node.value[1], -1) + remove_redbaron_node(assert_node.value[1], -1) # remove the expected value + assert_node.value[0].replace("print") + + if 'assert_almost_equal' in src: + assert_nodes = rb.findAll("NameNode", value='assert_almost_equal') + for assert_node in assert_nodes: + assert_node = assert_node.parent + # If relative error tolerance is specified, there are 3 arguments + if len(assert_node.value[1]) == 3: + # remove the relative error tolerance + remove_redbaron_node(assert_node.value[1], -1) + remove_redbaron_node(assert_node.value[1], -1) # remove the expected value + assert_node.value[0].replace("print") + + return rb.dumps() + + +def remove_initial_empty_lines(source): + """ + Some initial empty lines were added to keep RedBaron happy. + Need to strip these out before we pass the source code to the + directive for including source code into feature doc files. + """ + + idx = re.search(r'\S', source, re.MULTILINE).start() + return source[idx:] + + +def get_source_code(path): + """ + Return source code as a text string. + + Parameters + ---------- + path : str + Path to a file, module, function, class, or class method. + + Returns + ------- + str + The source code. + int + Indentation level. + module or None + The imported module. + class or None + The class specified by path. + method or None + The class method specified by path. + """ + + indent = 0 + class_obj = None + method_obj = None + + if path.endswith('.py'): + if not os.path.isfile(path): + raise SphinxError("Can't find file '%s' cwd='%s'" % (path, os.getcwd())) + with open(path, 'r') as f: + source = f.read() + module = None + else: + # First, assume module path since we want to support loading a full module as well. 
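+        # Resolution order: dotted module path first, then 'package.module.Class',
+        # and finally 'package.module.Class.method'.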
+ try: + module = importlib.import_module(path) + source = inspect.getsource(module) + + except ImportError: + + # Second, assume class and see if it works + try: + parts = path.split('.') + + module_path = '.'.join(parts[:-1]) + module = importlib.import_module(module_path) + class_name = parts[-1] + class_obj = getattr(module, class_name) + source = inspect.getsource(class_obj) + indent = 1 + + except ImportError: + + # else assume it is a path to a method + module_path = '.'.join(parts[:-2]) + module = importlib.import_module(module_path) + class_name = parts[-2] + method_name = parts[-1] + class_obj = getattr(module, class_name) + method_obj = getattr(class_obj, method_name) + source = inspect.getsource(method_obj) + indent = 2 + + return remove_leading_trailing_whitespace_lines(source), indent, module, class_obj, method_obj + + +def remove_raise_skip_tests(src): + """ + Remove from the code any raise unittest.SkipTest lines since we don't want those in + what the user sees. + """ + rb = RedBaron(src) + raise_nodes = rb.findAll("RaiseNode") + for rn in raise_nodes: + # only the raise for SkipTest + if rn.value[:2].dumps() == 'unittestSkipTest': + rn.parent.value.remove(rn) + return rb.dumps() + + +def remove_leading_trailing_whitespace_lines(src): + """ + Remove any leading or trailing whitespace lines. + + Parameters + ---------- + src : str + Input code. + + Returns + ------- + str + Code with trailing whitespace lines removed. + """ + lines = src.splitlines() + + non_whitespace_lines = [] + for i, l in enumerate(lines): + if l and not l.isspace(): + non_whitespace_lines.append(i) + imin = min(non_whitespace_lines) + imax = max(non_whitespace_lines) + + return '\n'.join(lines[imin: imax+1]) + + +def is_output_node(node): + """ + Determine whether a RedBaron node may be expected to generate output. + + Parameters + ---------- + node : + a RedBaron Node. + + Returns + ------- + bool + True if node may be expected to generate output, otherwise False. + """ + if node.type == 'print': + return True + + # lines with the following signatures and function names may generate output + output_signatures = [ + ('name', 'name', 'call'), + ('name', 'name', 'name', 'call') + ] + output_functions = [ + 'setup', 'run_model', 'run_driver', + 'check_partials', 'check_totals', + 'list_inputs', 'list_outputs', 'list_problem_vars' + ] + + if node.type == 'atomtrailers' and len(node.value) in (3, 4): + sig = [] + for val in node.value: + sig.append(val.type) + func_name = node.value[-2].value + if tuple(sig) in output_signatures and func_name in output_functions: + return True + + return False + + +def split_source_into_input_blocks(src): + """ + Split source into blocks; the splits occur at inserted prints. + + Parameters + ---------- + src : str + Input code. + + Returns + ------- + list + List of input code sections. + """ + input_blocks = [] + current_block = [] + + for line in src.splitlines(): + if 'print(">>>>>' in line: + tag = line.split('"')[1] + code = '\n'.join(current_block) + input_blocks.append(InputBlock(code, tag)) + current_block = [] + else: + current_block.append(line) + + if len(current_block) > 0: + # final input block, with no associated output + code = '\n'.join(current_block) + input_blocks.append(InputBlock(code, '')) + + return input_blocks + + +def insert_output_start_stop_indicators(src): + """ + Insert identifier strings so that output can be segregated from input. + + Parameters + ---------- + src : str + String containing input and output lines. 
+ + Returns + ------- + str + String with output demarked. + """ + lines = src.split('\n') + print_producing = [ + 'print(', + '.setup(', + '.run_model(', + '.run_driver(', + '.check_partials(', + '.check_totals(', + '.list_inputs(', + '.list_outputs(', + '.list_sources(', + '.list_source_vars(', + '.list_problem_vars(', + '.list_cases(', + '.list_model_options(', + '.list_solver_options(', + ] + + newlines = [] + input_block_number = 0 + in_try = False + in_continuation = False + head_indent = '' + for line in lines: + newlines.append(line) + + # Check if we are concluding a continuation line. + if in_continuation: + line = line.rstrip() + if not (line.endswith(',') or line.endswith('\\') or line.endswith('(')): + newlines.append('%sprint(">>>>>%d")' % (head_indent, input_block_number)) + input_block_number += 1 + in_continuation = False + + # Don't print if we are in a try block. + if in_try: + if 'except' in line: + in_try = False + continue + + if 'try:' in line: + in_try = True + continue + + # Searching for 'print(' is a little ambiguous. + if 'set_solver_print(' in line: + continue + + for item in print_producing: + if item in line: + indent = ' ' * (len(line) - len(line.lstrip())) + + # Line continuations are a litle tricky. + line = line.rstrip() + if line.endswith(',') or line.endswith('\\') or line.endswith('('): + in_continuation = True + head_indent = indent + break + + newlines.append('%sprint(">>>>>%d")' % (indent, input_block_number)) + input_block_number += 1 + break + + return '\n'.join(newlines) + + +def consolidate_input_blocks(input_blocks, output_blocks): + """ + Merge any input blocks for which there is no corresponding output + with subsequent blocks that do have output. + + Remove any leading and trailing blank lines from all input blocks. + """ + new_input_blocks = [] + new_block = '' + + for (code, tag) in input_blocks: + if tag not in output_blocks: + # no output, add to new consolidated block + if new_block and not new_block.endswith('\n'): + new_block += '\n' + new_block += code + elif new_block: + # add current input to new consolidated block and save + if new_block and not new_block.endswith('\n'): + new_block += '\n' + new_block += code + new_block = remove_leading_trailing_whitespace_lines(new_block) + new_input_blocks.append(InputBlock(new_block, tag)) + new_block = '' + else: + # just strip leading/trailing from input block + code = remove_leading_trailing_whitespace_lines(code) + new_input_blocks.append(InputBlock(code, tag)) + + # trailing input with no corresponding output + if new_block: + new_block = remove_leading_trailing_whitespace_lines(new_block) + new_input_blocks.append(InputBlock(new_block, '')) + + return new_input_blocks + + +def extract_output_blocks(run_output): + """ + Identify and extract outputs from source. + + Parameters + ---------- + run_output : str or list of str + Source code with outputs. + + Returns + ------- + dict + output blocks keyed on tags like ">>>>>4" + """ + if isinstance(run_output, list): + return sync_multi_output_blocks(run_output) + + output_blocks = {} + output_block = None + + for line in run_output.splitlines(): + if output_block is None: + output_block = [] + if line[:5] == '>>>>>': + output = ('\n'.join(output_block)).strip() + if output: + output_blocks[line] = output + output_block = None + else: + output_block.append(line) + + if output_block is not None: + # It is possible to have trailing output + # (e.g. 
if the last print_producing statement is in a try block) + output_blocks['Trailing'] = output_block + + return output_blocks + + +def strip_decorators(src): + """ + Remove any decorators from the source code of the method or function. + + Parameters + ---------- + src : str + Source code + + Returns + ------- + str + Source code minus any decorators + """ + class Parser(ast.NodeVisitor): + def __init__(self): + self.function_node = None + + def visit_FunctionDef(self, node): + self.function_node = node + + def get_function(self): + return self.function_node + + tree = ast.parse(src) + parser = Parser() + parser.visit(tree) + + # get node for the first function + function_node = parser.get_function() + if not function_node.decorator_list: # no decorators so no changes needed + return src + + # Unfortunately, the ast library, for a decorated function, returns the line + # number for the first decorator when asking for the line number of the function + # So using the line number for the argument for of the function, which is always + # correct. But we assume that the argument is on the same line as the function. + # We also assume there IS an argument. If not, we raise an error. + if function_node.args.args: + function_lineno = function_node.args.args[0].lineno + else: + raise RuntimeError("Cannot determine line number for decorated function without args") + lines = src.splitlines() + + undecorated_src = '\n'.join(lines[function_lineno - 1:]) + + return undecorated_src + + +def strip_header(src): + """ + Directly manipulating function text to strip header, usually or maybe always just the + "def" lines for a method or function. + + This function assumes that the docstring and header, if any, have already been removed. + + Parameters + ---------- + src : str + source code + """ + lines = src.split('\n') + first_len = None + for i, line in enumerate(lines): + n1 = len(line) + newline = line.lstrip() + tab = n1 - len(newline) + if first_len is None: + first_len = tab + elif n1 == 0: + continue + if tab != first_len: + return '\n'.join(lines[i:]) + + return '' + + +def dedent(src): + """ + Directly manipulating function text to remove leading whitespace. + + Parameters + ---------- + src : str + source code + """ + + lines = src.split('\n') + if lines: + for i, line in enumerate(lines): + lstrip = line.lstrip() + if lstrip: # keep going if first line(s) are blank. + tab = len(line) - len(lstrip) + return '\n'.join(l[tab:] for l in lines[i:]) + return '' + + +def sync_multi_output_blocks(run_output): + """ + Combine output from different procs into the same output blocks. + + Parameters + ---------- + run_output : list of dict + List of outputs from individual procs. + + Returns + ------- + dict + Synced output blocks from all procs. + """ + if run_output: + # for each proc's run output, get a dict of output blocks keyed by tag + proc_output_blocks = [extract_output_blocks(outp) for outp in run_output] + + synced_blocks = {} + + for i, outp in enumerate(proc_output_blocks): + for tag in outp: + if str(outp[tag]).strip(): + if tag in synced_blocks: + synced_blocks[tag] += "(rank %d) %s\n" % (i, outp[tag]) + else: + synced_blocks[tag] = "(rank %d) %s\n" % (i, outp[tag]) + + return synced_blocks + else: + return {} + + +def run_code(code_to_run, path, module=None, cls=None, shows_plot=False, imports_not_required=False): + """ + Run the given code chunk and collect the output. 
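+
+    Parameters
+    ----------
+    code_to_run : str
+        Source of the code chunk to execute.
+    path : str
+        Path to the file the chunk came from; used to set the working directory
+        and for error reporting.
+    module : module or None
+        Module containing the chunk; when given, its directory becomes the working
+        directory and (for subprocess runs) its globals are exposed to the chunk.
+    cls : class or None
+        Owning test class; if it defines N_PROCS > 1 and mpi4py is importable,
+        the chunk is run under mpirun.
+    shows_plot : bool
+        If True, run the chunk in a subprocess so plotting cannot block the build.
+    imports_not_required : bool
+        If True, exec the chunk with the module's globals so it need not repeat imports.
+
+    Returns
+    -------
+    bool
+        True if the chunk was skipped.
+    bool
+        True if the chunk failed.
+    str
+        Captured output.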
+ """ + + skipped = False + failed = False + + if cls is None: + use_mpi = False + else: + try: + import mpi4py + except ImportError: + use_mpi = False + else: + N_PROCS = getattr(cls, 'N_PROCS', 1) + use_mpi = N_PROCS > 1 + + try: + # use subprocess to run code to avoid any nasty interactions between codes + + # Move to the test directory in case there are files to read. + save_dir = os.getcwd() + + if module is None: + code_dir = os.path.dirname(os.path.abspath(path)) + else: + code_dir = os.path.dirname(os.path.abspath(module.__file__)) + + os.chdir(code_dir) + + if use_mpi: + env = os.environ.copy() + + # output will be written to one file per process + env['USE_PROC_FILES'] = '1' + + env['OPENMDAO_CURRENT_MODULE'] = module.__name__ + env['OPENMDAO_CODE_TO_RUN'] = code_to_run + + p = subprocess.Popen(['mpirun', '-n', str(N_PROCS), sys.executable, _sub_runner], + env=env) + p.wait() + + # extract output blocks from all output files & merge them + output = [] + for i in range(N_PROCS): + with open('%d.out' % i) as f: + output.append(f.read()) + os.remove('%d.out' % i) + + elif shows_plot: + if module is None: + # write code to a file so we can run it. + fd, code_to_run_path = tempfile.mkstemp() + with os.fdopen(fd, 'w') as tmp: + tmp.write(code_to_run) + try: + p = subprocess.Popen([sys.executable, code_to_run_path], + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=os.environ) + output, _ = p.communicate() + if p.returncode != 0: + failed = True + + finally: + os.remove(code_to_run_path) + else: + env = os.environ.copy() + + env['OPENMDAO_CURRENT_MODULE'] = module.__name__ + env['OPENMDAO_CODE_TO_RUN'] = code_to_run + + p = subprocess.Popen([sys.executable, _sub_runner], + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) + output, _ = p.communicate() + if p.returncode != 0: + failed = True + + output = output.decode('utf-8', 'ignore') + else: + # just exec() the code for serial tests. + + # capture all output + stdout = sys.stdout + stderr = sys.stderr + strout = StringIO() + sys.stdout = strout + sys.stderr = strout + + # We need more precision from numpy + with printoptions(precision=8): + + if module is None: + globals_dict = { + '__file__': path, + '__name__': '__main__', + '__package__': None, + '__cached__': None, + } + else: + if imports_not_required: + # code does not need to include all imports + # Get from module + globals_dict = module.__dict__ + else: + globals_dict = {} + + try: + exec(code_to_run, globals_dict) + except Exception as err: + # for actual errors, print code (with line numbers) to facilitate debugging + if not isinstance(err, unittest.SkipTest): + for n, line in enumerate(code_to_run.split('\n')): + print('%4d: %s' % (n, line), file=stderr) + raise + finally: + sys.stdout = stdout + sys.stderr = stderr + + output = strout.getvalue() + + except subprocess.CalledProcessError as e: + output = e.output.decode('utf-8', 'ignore') + # Get a traceback. 
+ if 'raise unittest.SkipTest' in output: + reason_for_skip = output.splitlines()[-1][len('unittest.case.SkipTest: '):] + output = reason_for_skip + skipped = True + else: + output = "Running of embedded code {} in docs failed due to: \n\n{}".format(path, output) + failed = True + except unittest.SkipTest as skip: + output = str(skip) + skipped = True + except Exception as exc: + output = "Running of embedded code {} in docs failed due to: \n\n{}".format(path, traceback.format_exc()) + failed = True + finally: + os.chdir(save_dir) + + return skipped, failed, output + + +def get_skip_output_node(output): + output = "Test skipped because " + output + return skipped_or_failed_node(text=output, number=1, kind="skipped") + + +def get_interleaved_io_nodes(input_blocks, output_blocks): + """ + Parameters + ---------- + input_blocks : list of tuple + Each tuple is a block of code and the tag marking it's output. + + output_blocks : dict + Output blocks keyed on tag. + """ + nodelist = [] + n = 1 + + for (code, tag) in input_blocks: + input_node = nodes.literal_block(code, code) + input_node['language'] = 'python' + nodelist.append(input_node) + if tag and tag in output_blocks: + outp = cgiesc.escape(output_blocks[tag]) + if (outp.strip()): + output_node = in_or_out_node(kind="Out", number=n, text=outp) + nodelist.append(output_node) + n += 1 + + if 'Trailing' in output_blocks: + output_node = in_or_out_node(kind="Out", number=n, text=output_blocks['Trailing']) + nodelist.append(output_node) + + return nodelist + + +def get_output_block_node(output_blocks): + output_block = '\n'.join([cgiesc.escape(ob) for ob in output_blocks]) + return in_or_out_node(kind="Out", number=1, text=output_block) diff --git a/mphys/doc_utils/_utils/generate_sourcedocs.py b/mphys/doc_utils/_utils/generate_sourcedocs.py new file mode 100644 index 00000000..155b44da --- /dev/null +++ b/mphys/doc_utils/_utils/generate_sourcedocs.py @@ -0,0 +1,162 @@ +import os +from openmdao.docs.config_params import IGNORE_LIST + +# this function is used to create the entire directory structure +# of our source docs, as well as writing out each individual rst file. + + +def generate_docs(dir, top, packages, project_name='openmdao'): + """ + generate_docs + + Can supply a project name other than `openmdao` to use this function + with other projects. + """ + index_top = """:orphan: + +.. _source_documentation: + +******************** +Source Docs +******************** + +.. toctree:: + :titlesonly: + :maxdepth: 1 + +""" + package_top = """ +.. toctree:: + :maxdepth: 1 + +""" + + ref_sheet_bottom = """ + :members: + :undoc-members: + :special-members: __init__, __contains__, __iter__, __setitem__, __getitem__ + :show-inheritance: + :inherited-members: + +.. toctree:: + :maxdepth: 1 +""" + + ref_sheet_bottom_noinherit = """ + :members: + :undoc-members: + :special-members: __init__, __contains__, __iter__, __setitem__, __getitem__ + +.. 
toctree:: + :maxdepth: 1 +""" + + # file_wrap inherits from pyparsing, which has some formatting issues when + # generating src docs, so this just turns off the generation of inherited stuff + no_inherit = set([ + 'file_wrap.py', + ]) + docs_dir = os.path.dirname(dir) + + doc_dir = os.path.join(docs_dir, "_srcdocs") + if os.path.isdir(doc_dir): + import shutil + shutil.rmtree(doc_dir) + + if not os.path.isdir(doc_dir): + os.mkdir(doc_dir) + + packages_dir = os.path.join(doc_dir, "packages") + if not os.path.isdir(packages_dir): + os.mkdir(packages_dir) + + # look for directories in the top level, one up from docs + # those directories will be the packages that + # auto-generate at the top-level index.rst file for _srcdocs, based on + # the packages that are passed in, which are set in conf.py. + + # to improve the order in which the user sees the source docs, + # order the packages in this list explicitly. Any new ones that + # are detected will show up at the end of the list. + + # everything in openmdao dir that isn't discarded is appended as a source package. + for listing in os.listdir(os.path.join(top)): + if os.path.isdir(os.path.join("..", listing)): + if listing not in IGNORE_LIST and listing not in packages: + packages.append(listing) + + # begin writing the '_srcdocs/index.rst' file at mid level. + index_filename = os.path.join(doc_dir, "index.rst") + index = open(index_filename, "w") + index.write(index_top) + + # auto-generate package header files (e.g. 'openmdao.core.rst') + for package in packages: + # a package is e.g. openmdao.core, that contains source files + # a sub_package, is a src file, e.g. openmdao.core.component + sub_packages = [] + package_filename = os.path.join(packages_dir, + project_name + "." + package + ".rst") + package_name = project_name + "." + package + + # the sub_listing is going into each package dir and listing what's in it + for sub_listing in sorted(os.listdir(os.path.join(dir, package.replace('.','/')))): + # don't want to catalog files twice, nor use init files nor test dir + if (os.path.isdir(sub_listing) and sub_listing != "tests") or \ + (sub_listing.endswith(".py") and not sub_listing.startswith('_')): + # just want the name of e.g. dataxfer not dataxfer.py + sub_packages.append(sub_listing.rsplit('.')[0]) + + if len(sub_packages) > 0: + # continue to write in the top-level index file. + # only document non-empty packages -- to avoid errors + # (e.g. at time of writing, doegenerators, drivers, are empty dirs) + + # specifically don't use os.path.join here. Even windows wants the + # stuff in the file to have fwd slashes. + index.write(" packages/" + project_name + "." + package + "\n") + + # make subpkg directory (e.g. _srcdocs/packages/core) for ref sheets + package_dir = os.path.join(packages_dir, package) + os.mkdir(package_dir) + + # create/write a package index file: (e.g. "_srcdocs/packages/openmdao.core.rst") + package_file = open(package_filename, "w") + package_file.write(package_name + "\n") + package_file.write("-" * len(package_name) + "\n") + package_file.write(package_top) + + for sub_package in sub_packages: + SKIP_SUBPACKAGES = ['__pycache__'] + # this line writes subpackage name e.g. "core/component.py" + # into the corresponding package index file (e.g. "openmdao.core.rst") + if sub_package not in SKIP_SUBPACKAGES: + # specifically don't use os.path.join here. Even windows wants the + # stuff in the file to have fwd slashes. 
+ package_file.write(" " + package + "/" + sub_package + "\n") + + # creates and writes out one reference sheet (e.g. core/component.rst) + ref_sheet_filename = os.path.join(package_dir, sub_package + ".rst") + ref_sheet = open(ref_sheet_filename, "w") + + # get the meat of the ref sheet code done + filename = sub_package + ".py" + ref_sheet.write(".. index:: " + filename + "\n\n") + ref_sheet.write(".. _" + package_name + "." + + filename + ":\n\n") + ref_sheet.write(filename + "\n") + ref_sheet.write("-" * len(filename) + "\n\n") + ref_sheet.write(".. automodule:: " + package_name + "." + sub_package) + + # finish and close each reference sheet. + if filename in no_inherit: + ref_sheet.write(ref_sheet_bottom_noinherit) + else: + ref_sheet.write(ref_sheet_bottom) + ref_sheet.close() + + # finish and close each package file + package_file.close() + + # finish and close top-level index file + index.close() diff --git a/mphys/doc_utils/_utils/patch.py b/mphys/doc_utils/_utils/patch.py new file mode 100644 index 00000000..ba386895 --- /dev/null +++ b/mphys/doc_utils/_utils/patch.py @@ -0,0 +1,138 @@ +from numpydoc.docscrape_sphinx import SphinxDocString +from numpydoc.docscrape import NumpyDocString, Reader, ParseError +import textwrap + +# start off running the monkeypatch to keep options/parameters +# usable in docstring for autodoc. + + +def __init__(self, docstring, config={}): + """ + init + """ + orig_docstring = docstring + docstring = textwrap.dedent(docstring).split('\n') + + self._doc = Reader(docstring) + self._parsed_data = { + 'Signature': '', + 'Summary': [''], + 'Extended Summary': [], + 'Parameters': [], + 'Options': [], + 'Returns': [], + 'Yields': [], + 'Raises': [], + 'Warns': [], + 'Other Parameters': [], + 'Attributes': [], + 'Methods': [], + 'See Also': [], + 'Notes': [], + 'Warnings': [], + 'References': '', + 'Examples': '', + 'index': {} + } + + try: + self._parse() + except ParseError as e: + e.docstring = orig_docstring + raise + + # In creation of docs, remove private Attributes (beginning with '_') + # with a crazy list comprehension + self._parsed_data["Attributes"][:] = [att for att in self._parsed_data["Attributes"] + if not att[0].startswith('_')] + + +def _parse(self): + """ + parse + """ + self._doc.reset() + self._parse_summary() + + sections = list(self._read_sections()) + section_names = set([section for section, content in sections]) + + has_returns = 'Returns' in section_names + has_yields = 'Yields' in section_names + # We could do more tests, but we are not. Arbitrarily. + if has_returns and has_yields: + msg = 'Docstring contains both a Returns and Yields section.' + raise ValueError(msg) + + for (section, content) in sections: + if not section.startswith('..'): + section = (s.capitalize() for s in section.split(' ')) + section = ' '.join(section) + if self.get(section): + msg = ("The section %s appears twice in the docstring." % + section) + raise ValueError(msg) + + if section in ('Parameters', 'Options', 'Params', 'Returns', 'Yields', 'Raises', + 'Warns', 'Other Parameters', 'Attributes', + 'Methods'): + self[section] = self._parse_param_list(content) + elif section.startswith('.. 
index::'): + self['index'] = self._parse_index(section, content) + elif section == 'See Also': + self['See Also'] = self._parse_see_also(content) + else: + self[section] = content + + +def __str__(self, indent=0, func_role="obj"): + """ + our own __str__ + """ + out = [] + out += self._str_signature() + out += self._str_index() + [''] + out += self._str_summary() + out += self._str_extended_summary() + out += self._str_param_list('Parameters') + out += self._str_options('Options') + out += self._str_returns() + for param_list in ('Other Parameters', 'Raises', 'Warns'): + out += self._str_param_list(param_list) + out += self._str_warnings() + out += self._str_see_also(func_role) + out += self._str_section('Notes') + out += self._str_references() + out += self._str_examples() + for param_list in ('Attributes', 'Methods'): + out += self._str_member_list(param_list) + out = self._str_indent(out, indent) + return '\n'.join(out) + + +def _str_options(self, name): + """ + """ + out = [] + if self[name]: + out += self._str_field_list(name) + out += [''] + for param, param_type, desc in self[name]: + if param_type: + out += self._str_indent(['**%s** : %s' % (param.strip(), + param_type)]) + else: + out += self._str_indent(['**%s**' % param.strip()]) + if desc: + out += [''] + out += self._str_indent(desc, 8) + out += [''] + return out + + +# Do the actual patch switchover to these local versions +def do_monkeypatch(): + NumpyDocString.__init__ = __init__ + SphinxDocString._str_options = _str_options + SphinxDocString._parse = _parse + SphinxDocString.__str__ = __str__ diff --git a/mphys/doc_utils/_utils/preprocess_tags.py b/mphys/doc_utils/_utils/preprocess_tags.py new file mode 100644 index 00000000..24f0f500 --- /dev/null +++ b/mphys/doc_utils/_utils/preprocess_tags.py @@ -0,0 +1,95 @@ +# A script that finds occurrences of the .. tags:: directive +# and sets up the structure of the tags directory. One file +# is created for each subject tag, that file contains links to +# each instance of the tag throughout the docs. + +import os +import shutil +import re + + +def make_tagdir(): + # Clean up tagdir, create tagdir, return tagdir + dir = os.path.dirname(__file__) + tagdir = os.path.join(dir, "../tags") + + if os.path.isdir(tagdir): + shutil.rmtree(tagdir) + + os.mkdir(tagdir) + + return tagdir + + +def make_tagfiles(docdirs, tagdir): + # Pull tags from each file, then make a file + # for each tag, containing all links to tagged files. + for docdir in docdirs: + for dirpath, dirnames, filenames in os.walk(docdir): + for filename in filenames: + # The path to the file being read for tags + sourcefile = os.path.join(dirpath, filename) + # A file object for the file being read for tags + with open(sourcefile, 'r', encoding="latin-1") as textfile: + # The text of the entire sourcefile + filetext = textfile.read() + # Pull all tag directives out of the filetext + matches = re.findall(".. tags::.*$", filetext) + + # For every instance of tag directive, get a list of tags + for match in matches: + match = match.lstrip(".. tags::") + taglist = match.split(", ") + + for tag in taglist: + filepath = os.path.join(tagdir, (tag + ".rst")) + + # If the tagfile doesn't exist, let's put in a header + if not os.path.exists(filepath): + tagfilelabel = ".. _" + tag + ": \n" + tagfileheader = """ +========================= +%s +========================= + .. toctree:: +""" % tag + + + # Write the header for this tag's file. 
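+                        # Append mode ('a'): later matches for the same tag add
+                        # their links below this header.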
+ with open(filepath, 'a') as tagfile: + tagfile.write(tagfilelabel) + tagfile.write(tagfileheader) + # Write a link into an existing tagfile. + with open(filepath, 'a') as tagfile: + tagfile.write(" ../%s\n" % (sourcefile)) + + +def make_tagindex(tagdir): + # Once all the files exist, create a simple index.rst file + indexfile = tagdir + "/index.rst" + + with open(indexfile, 'a') as index: + index.write(""" +:orphan: + +================ +Tags in OpenMDAO +================ +.. toctree:: + :maxdepth: 1 + :glob: + + ./* + """) + + +def tag(): + # Set the directories in which to find tags + # Let's make tags for dirs in this dr that don't start with an underscore. + docdirs = [x for x in os.listdir('.') if os.path.isdir(x) and not x.startswith('_')] + tagdir = make_tagdir() + make_tagfiles(docdirs, tagdir) + make_tagindex(tagdir) + +if __name__ == "__main__": + tag() diff --git a/mphys/doc_utils/_utils/run_sub.py b/mphys/doc_utils/_utils/run_sub.py new file mode 100644 index 00000000..9b671b5b --- /dev/null +++ b/mphys/doc_utils/_utils/run_sub.py @@ -0,0 +1,41 @@ +""" +This is used by our doc build system to execute a code chunk in a subprocess while giving that code chunk +access to its containing module's globals. +""" + +import os +import sys +import importlib +import traceback +import numpy as np +from openmdao.utils.general_utils import printoptions + +if __name__ == '__main__': + import openmdao.utils.mpi # this will activate use_proc_files + try: + module_path = os.environ.get("OPENMDAO_CURRENT_MODULE", "").strip() + if module_path: + stdout_save = sys.stdout + + # send any output to dev/null during the import so it doesn't clutter our embedded code output + with open(os.devnull, "w") as f: + sys.stdout = f + + mod = importlib.import_module(module_path) + + sys.stdout = stdout_save + else: + raise RuntimeError("OPENMDAO_CURRENT_MODULE was not specified.") + + code_to_run = os.environ.get("OPENMDAO_CODE_TO_RUN", "").strip() + if not code_to_run: + raise RuntimeError("OPENMDAO_CODE_TO_RUN has not been set.") + + with printoptions(precision=8): + exec(code_to_run, mod.__dict__) + + except Exception: + traceback.print_exc() + sys.exit(-1) + + sys.exit(0) diff --git a/mphys/doc_utils/_utils/upload_doc_version.py b/mphys/doc_utils/_utils/upload_doc_version.py new file mode 100644 index 00000000..92e687dd --- /dev/null +++ b/mphys/doc_utils/_utils/upload_doc_version.py @@ -0,0 +1,99 @@ +import sys +import subprocess +import pipes +import os + + +def get_tag_info(): + """ + Return the latest git tag, meaning, highest numerically, as a string, and the associated commit ID. + """ + # using a pattern to only grab tags that are in version format "X.Y.Z" + git_versions = subprocess.Popen(['git', 'tag', '-l', '*.*.*'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + cmd_out, cmd_err = git_versions.communicate() + + cmd_out = cmd_out.decode('utf8') + # take the output of git tag -l *.*.*, and split it from one string into a list. 
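+    # The list is then sorted numerically component by component, so e.g.
+    # '2.10.0' correctly ranks above '2.9.0'.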
+ version_tags = cmd_out.split() + + if not version_tags: + raise Exception('No tags found in repository') + + # use sort to put the versions list in order from lowest to highest + version_tags.sort(key=lambda s: [int(u) for u in s.split('.')]) + + # grab the highest tag that this repo knows about + latest_tag = version_tags[-1] + + cmd = subprocess.Popen(['git', 'rev-list', '-1', latest_tag, '-s'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + cmd_out, cmd_err = cmd.communicate() + + cmd_out = cmd_out.decode('utf8') + commit_id = cmd_out.strip() + + return latest_tag, commit_id + + +def get_commit_info(): + """ + Return the commit number of the most recent git commit as a string. + """ + git_commit = subprocess.Popen(['git', 'show', '--pretty=oneline', '-s'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + cmd_out, cmd_err = git_commit.communicate() + + cmd_out = cmd_out.decode('utf8') + commit_id = cmd_out.split()[0] + + return commit_id + + +def get_doc_version(): + """ + Returns either a git commit ID, or a X.Y.Z release number, + and an indicator if this is a release or not + """ + release_tag, release_commit = get_tag_info() + + current_commit = get_commit_info() + + if current_commit == release_commit: + return release_tag, 1 + else: + return current_commit, 0 + + +def upload_doc_version(destination): + """ + Upload properly-named docs. + + Parameters + ---------- + destination : str + The destination for the documentation, [USER@]HOST:DIRECTORY + """ + name, rel = get_doc_version() + + # if release, send to version-numbered dir + if rel: + destination += name + # if not release, it's a "daily build," send to latest + else: + destination += "latest" + + # execute the rsync to upload docs + cmd = "rsync -r --delete-after -v _build/html/* " + destination + status = subprocess.call(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, shell=True) + + if status == 0: + print("Uploaded documentation for", name if rel else "latest") + return True + else: + raise Exception('Doc transfer failed.') + + +if __name__ == "__main__": + if len(sys.argv) != 2: + print("Destination required, [USER@]HOST:DIRECTORY") + else: + upload_doc_version(sys.argv[1]) diff --git a/mphys/doc_utils/config_params.py b/mphys/doc_utils/config_params.py new file mode 100644 index 00000000..7c1cc68f --- /dev/null +++ b/mphys/doc_utils/config_params.py @@ -0,0 +1,4 @@ +MOCK_MODULES = ['h5py', 'petsc4py', 'mpi4py', 'pyoptsparse', 'pyDOE2',] +IGNORE_LIST = [ + 'docs', 'tests', 'devtools', '__pycache__', 'code_review', 'test_suite', 'utils' + ] From 5ed31b442d0db8f8198dd13529796f029cb938d9 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 12:56:05 -0400 Subject: [PATCH 04/15] Break up CI steps --- .github/workflows/unit_tests_and_docs.yml | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 35e659b0..350f9cbd 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -69,21 +69,16 @@ jobs: echo "List installed packages/versions"; echo "============================================================="; conda list; - echo "============================================================="; - echo "Run the tests."; - echo "============================================================="; + - name: 'Run unit tests' + run: | cd tests/unit_tests testflo - echo "============================================================="; - echo "Build the 
docs."; - echo "============================================================="; - cd ../../docs + - name: 'Build docs' + run: | + cd docs make html cd _build/html zip -r ./mphys.zip . - echo "============================================================="; - echo "Operations Completed."; - echo "============================================================="; - name: 'Upload Docs' uses: actions/upload-artifact@v2 with: From 5a9cd31c0b7310874bc52f253241f211691dfbb6 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 12:56:17 -0400 Subject: [PATCH 05/15] Remove example ci file --- .github/workflows/main.yml | 36 ------------------------------------ 1 file changed, 36 deletions(-) delete mode 100644 .github/workflows/main.yml diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index 30a4bc10..00000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,36 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: CI - -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - branches: [ main ] - pull_request: - branches: [ main ] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-latest - - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 - - # Runs a single command using the runners shell - - name: Run a one-line script - run: echo Hello, world! - - # Runs a set of commands using the runners shell - - name: Run a multi-line script - run: | - echo Add other actions to build, - echo test, and deploy your project. From e1fdae997cd2b0c1b1b4324b16834b002333e684 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 13:18:06 -0400 Subject: [PATCH 06/15] verbose install of testflo to see why it wasn't found --- .github/workflows/unit_tests_and_docs.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 350f9cbd..2e12c05c 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -62,7 +62,7 @@ jobs: git clone -q https://github.com/OpenMDAO/OpenMDAO; cd OpenMDAO; pip install .[all]; - pip install testflo; + pip install -v testflo; cd ../mphys; pip install -e . 
echo "============================================================="; @@ -71,10 +71,14 @@ jobs: conda list; - name: 'Run unit tests' run: | + source $CONDA/etc/profile.d/conda.sh; + echo $CONDA/bin >> $GITHUB_PATH; cd tests/unit_tests testflo - name: 'Build docs' run: | + source $CONDA/etc/profile.d/conda.sh; + echo $CONDA/bin >> $GITHUB_PATH; cd docs make html cd _build/html From 7344ed4f60bc08c61a01fc92c34d0e69a747ce0f Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 13:30:15 -0400 Subject: [PATCH 07/15] Build testflo verbose before OM --- .github/workflows/unit_tests_and_docs.yml | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 2e12c05c..0d7dc2c5 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -47,22 +47,13 @@ jobs: conda install -c anaconda mpi4py -q -y; conda install -c conda-forge petsc=3.12 petsc4py -q -y; echo "============================================================="; - echo "Install pyoptsparse"; - echo "============================================================="; - git clone -q https://github.com/OpenMDAO/build_pyoptsparse; - cd build_pyoptsparse; - chmod 755 ./build_pyoptsparse.sh; - ./build_pyoptsparse.sh -b "v2.1.5"; - cd ..; - export LD_LIBRARY_PATH=$HOME/ipopt/lib; - echo "============================================================="; echo "Install OpenMDAO"; echo "============================================================="; cd ..; + pip install -v testflo; git clone -q https://github.com/OpenMDAO/OpenMDAO; cd OpenMDAO; pip install .[all]; - pip install -v testflo; cd ../mphys; pip install -e . echo "============================================================="; From 547623fe823bb50ad8319b6ff7b567f0d5b55340 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 13:35:38 -0400 Subject: [PATCH 08/15] More CI debugging --- .github/workflows/unit_tests_and_docs.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 0d7dc2c5..d1e2781d 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -51,6 +51,8 @@ jobs: echo "============================================================="; cd ..; pip install -v testflo; + which testflo + echo "TESTFLO" git clone -q https://github.com/OpenMDAO/OpenMDAO; cd OpenMDAO; pip install .[all]; From 030cdbb873be83455d6c88ac2d663f6d291ec060 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 13:38:41 -0400 Subject: [PATCH 09/15] Activate conda in subsequent steps --- .github/workflows/unit_tests_and_docs.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index d1e2781d..0b181810 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -51,8 +51,6 @@ jobs: echo "============================================================="; cd ..; pip install -v testflo; - which testflo - echo "TESTFLO" git clone -q https://github.com/OpenMDAO/OpenMDAO; cd OpenMDAO; pip install .[all]; @@ -64,14 +62,12 @@ jobs: conda list; - name: 'Run unit tests' run: | - source $CONDA/etc/profile.d/conda.sh; - echo $CONDA/bin >> $GITHUB_PATH; + conda activate OpenMDAO; cd tests/unit_tests testflo - name: 'Build docs' run: | - source 
$CONDA/etc/profile.d/conda.sh; - echo $CONDA/bin >> $GITHUB_PATH; + conda activate OpenMDAO; cd docs make html cd _build/html From 922859bc1549db27e4f7895c034b6ebff048146b Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 13:43:36 -0400 Subject: [PATCH 10/15] conda init --- .github/workflows/unit_tests_and_docs.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 0b181810..84524c42 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -62,11 +62,13 @@ jobs: conda list; - name: 'Run unit tests' run: | + conda init /usr/bin/bash conda activate OpenMDAO; cd tests/unit_tests testflo - name: 'Build docs' run: | + conda init /usr/bin/bash conda activate OpenMDAO; cd docs make html From d131ed768781cdd8bdb6865020ba3b904ede0dae Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 13:49:30 -0400 Subject: [PATCH 11/15] only bash not full path --- .github/workflows/unit_tests_and_docs.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 84524c42..08881718 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -62,13 +62,13 @@ jobs: conda list; - name: 'Run unit tests' run: | - conda init /usr/bin/bash + conda init bash conda activate OpenMDAO; cd tests/unit_tests testflo - name: 'Build docs' run: | - conda init /usr/bin/bash + conda init bash conda activate OpenMDAO; cd docs make html From da45a7d3108e389781dd8f10b42fde8a7df3e7bc Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 13:53:41 -0400 Subject: [PATCH 12/15] Merge steps --- .github/workflows/unit_tests_and_docs.yml | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 08881718..8f4e5e4f 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -25,7 +25,7 @@ jobs: with: python-version: 3.8 - - name: Install OpenMDAO Stack + - name: Install, unit test, docs run: | echo "============================================================="; echo "Run #${GITHUB_RUN_NUMBER}"; @@ -60,17 +60,9 @@ jobs: echo "List installed packages/versions"; echo "============================================================="; conda list; - - name: 'Run unit tests' - run: | - conda init bash - conda activate OpenMDAO; cd tests/unit_tests testflo - - name: 'Build docs' - run: | - conda init bash - conda activate OpenMDAO; - cd docs + cd ../../docs make html cd _build/html zip -r ./mphys.zip . 
From 7186dc36734132ad0cff2126ccb1bc3e8fd3b7b0 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 13:58:39 -0400 Subject: [PATCH 13/15] pip install redbaron --- .github/workflows/unit_tests_and_docs.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 8f4e5e4f..85fd7129 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -50,7 +50,8 @@ jobs: echo "Install OpenMDAO"; echo "============================================================="; cd ..; - pip install -v testflo; + pip install testflo; + pip intall redbaron git clone -q https://github.com/OpenMDAO/OpenMDAO; cd OpenMDAO; pip install .[all]; From db9c5f2fca0af323b569de116b17be061e67c419 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 14:01:37 -0400 Subject: [PATCH 14/15] Spelling --- .github/workflows/unit_tests_and_docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index 85fd7129..c0bec9c3 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -51,7 +51,7 @@ jobs: echo "============================================================="; cd ..; pip install testflo; - pip intall redbaron + pip install redbaron git clone -q https://github.com/OpenMDAO/OpenMDAO; cd OpenMDAO; pip install .[all]; From 3177dd72b377d5444d31d45a2234692cc063e079 Mon Sep 17 00:00:00 2001 From: Kevin Jacobson Date: Thu, 5 Aug 2021 14:15:15 -0400 Subject: [PATCH 15/15] Rename workflow --- .github/workflows/unit_tests_and_docs.yml | 4 ++-- README.md | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/unit_tests_and_docs.yml b/.github/workflows/unit_tests_and_docs.yml index c0bec9c3..e4a7842f 100644 --- a/.github/workflows/unit_tests_and_docs.yml +++ b/.github/workflows/unit_tests_and_docs.yml @@ -13,7 +13,7 @@ on: jobs: # This job is called test_docs. - test_docs: + unit_test_and_docs: # Run on Ubuntu runs-on: ubuntu-latest @@ -25,7 +25,7 @@ jobs: with: python-version: 3.8 - - name: Install, unit test, docs + - name: Install, run unit test, build docs run: | echo "============================================================="; echo "Run #${GITHUB_RUN_NUMBER}"; diff --git a/README.md b/README.md index 627a32dc..e63a99cb 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,6 @@ # MPHYS +[![Unit Tests and Docs](https://github.com/OpenMDAO/mphys/actions/workflows/unit_tests_and_docs.yml/badge.svg)](https://github.com/OpenMDAO/mphys/actions/workflows/unit_tests_and_docs.yml) + MPHYS is a framework for coupling high-fidelity physics though OpenMDAO